In [ ]:
import math

import numpy as np
import pandas as pd

import matplotlib as mpl
import matplotlib.pyplot as plt

from scipy.spatial.distance import cdist
import scipy.cluster.hierarchy as shc

from sklearn import cluster, metrics
from sklearn.cluster import AgglomerativeClustering, KMeans, MeanShift, estimate_bandwidth
from sklearn.metrics import classification_report, mean_squared_error, silhouette_score, v_measure_score
from sklearn.metrics.cluster import adjusted_rand_score
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler, normalize

from yellowbrick.cluster import KElbowVisualizer

from keras import callbacks, layers, models, optimizers
from keras.layers import BatchNormalization, Dense, Dropout, LSTM
from keras.models import Sequential
from keras.preprocessing.sequence import TimeseriesGenerator

%matplotlib inline
In [ ]:
from google.colab import drive
drive.mount('/content/drive')
Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount("/content/drive", force_remount=True).
In [ ]:
dfGC = pd.read_csv('/content/drive/My Drive/Final thesis/reshapedGC.csv', parse_dates=['Datetime'], dayfirst=True, na_filter=False)

Note: the first column of the CSV was renamed manually to 'Datetime' before loading (a programmatic alternative is sketched below).
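A minimal sketch of doing the rename in code instead of by hand (assuming the first CSV column holds the timestamps; this cell is hypothetical, not part of the original pipeline):

In [ ]:
# Hypothetical: relabel the first column as 'Datetime', then parse it to datetimes
dfGC = dfGC.rename(columns={dfGC.columns[0]: 'Datetime'})
dfGC['Datetime'] = pd.to_datetime(dfGC['Datetime'], dayfirst=True)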

In [ ]:
dfGC['minute'] = dfGC['Datetime'].apply(lambda x: x.minute)
dfGC['hour'] = dfGC['Datetime'].apply(lambda x: x.hour + x.minute/60)   # fractional hour: 09:30 -> 9.5
dfGC['weekday'] = dfGC['Datetime'].apply(lambda x: x.weekday() < 5)    # True for Monday-Friday
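A quick check of the fractional-hour encoding on a single hypothetical timestamp:

In [ ]:
ts = pd.Timestamp('2012-07-02 09:30:00')   # a Monday
print(ts.hour + ts.minute / 60)   # 9.5
print(ts.weekday() < 5)           # True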
In [ ]:
dfGCwk = dfGC[dfGC.weekday == True]
dfGCwkn = dfGC[dfGC.weekday == False]
In [ ]:
import calendar # To have human-readable names for the days of the week
dfGCwk['Datetime'].apply(lambda x: calendar.day_name[x.weekday()]).value_counts()
Out[ ]:
Monday       2496
Tuesday      2496
Friday       2496
Wednesday    2496
Thursday     2496
Name: Datetime, dtype: int64
In [ ]:
dfGCwk_dpat = dfGCwk.groupby(by=dfGCwk.hour).mean()
dfGCwk_dpat = dfGCwk_dpat.drop(columns = ['minute','weekday'])
dfGCwk_dpat.head()
Out[ ]:
[Wide output truncated: 5 rows × 299 columns — mean half-hourly weekday consumption, one column per customer ID, indexed by fractional hour of day (0.0, 0.5, 1.0, ...).]

In [ ]:
X_wk = dfGCwk.groupby(by=dfGCwk.Datetime).mean()
X_wk = X_wk.drop(columns = ['minute','hour','weekday'])
X_wk = X_wk.transpose()
X_wk.head()
Out[ ]:
[Wide output truncated: 5 rows × 12480 columns — one row per customer, one column per half-hour timestamp from 2012-07-02 00:00 to 2013-06-28 23:30 (weekdays only).]

In [ ]:
dfGCwkn_dpat = dfGCwkn.groupby(by=dfGCwkn.hour).mean()
dfGCwkn_dpat = dfGCwkn_dpat.drop(columns = ['minute','weekday'])
dfGCwkn_dpat.head()
Out[ ]:
[Wide output truncated: 5 rows × 299 columns — mean half-hourly weekend consumption, one column per customer ID, indexed by fractional hour of day.]

In [ ]:
X_wkn = dfGCwkn.groupby(by=dfGCwkn.Datetime).mean()
X_wkn = X_wkn.drop(columns = ['minute','hour','weekday'])
X_wkn = X_wkn.transpose()
X_wkn.head()
Out[ ]:
[Wide output truncated: 5 rows × 5040 columns — one row per customer, one column per half-hour timestamp from 2012-07-01 00:00 to 2013-06-30 23:30 (weekends only).]

In [ ]:
loc_6h = mpl.ticker.MultipleLocator(6)
In [ ]:
ax = dfGCwk_dpat.plot(color='k', alpha=0.15, lw=0.5)
dfGCwk_dpat.mean(axis=1).plot(color='tab:green', lw=3, alpha=0.7)

ax.xaxis.set_major_locator(loc_6h)
plt.legend(ax.lines[-2:], ['1 customer', 'average'])
ax.set(
    xlabel='hour of day',
    ylabel='power (kW)',
    ylim=(-0.1, 3.5),
    title='Weekday daily consumption over 2012-2013'
    );

plt.tight_layout()
In [ ]:
ax = dfGCwkn_dpat.plot(color='k', alpha=0.15, lw=0.5)
dfGCwkn_dpat.mean(axis=1).plot(color='tab:green', lw=3, alpha=0.7)

ax.xaxis.set_major_locator(loc_6h)
plt.legend(ax.lines[-2:], ['1 customer', 'average'])
ax.set(
    xlabel='hour of day',
    ylabel='power (kW)',
    ylim=(-0.1, 3.5),
    title='Weekend daily consumption over 2012-2013'
    );

plt.tight_layout()
In [ ]:
def elbow_method(dataframe, min_k, max_k):
    # Try every k between min_k and max_k
    cluster_nums = range(min_k, max_k + 1)

    # One K-means model per k
    k_mean_algs = [cluster.KMeans(n_clusters=k) for k in cluster_nums]

    # Fit each model
    k_mean_res = [alg.fit(dataframe) for alg in k_mean_algs]

    # Centroids for each value of k
    centroids = [res.cluster_centers_ for res in k_mean_res]

    # Euclidean distance between every point and every centroid
    distances = [cdist(dataframe, centroid, 'euclidean') for centroid in centroids]

    # Distance from each point to its closest centroid
    min_distances = [np.min(distance, axis=1) for distance in distances]

    # Average within-cluster sum of squares, normalised by the number of points
    avg_sum_squares = [sum(dist ** 2) / dataframe.shape[0] for dist in min_distances]

    # Plot the elbow graph
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot(cluster_nums, avg_sum_squares, '*-', color='tab:green')
    plt.grid(True)
    plt.xlabel('Number of clusters')
    plt.ylabel('Average sum of squares within cluster')
    plt.show()

    return (k_mean_algs, k_mean_res)
In [ ]:
k_algs, k_res = elbow_method(X_wk, 2, 12)
In [ ]:
k_algs, k_res = elbow_method(X_wkn, 2, 12)
In [ ]:
km_wk = KMeans().fit(X_wk)
pd.Series.value_counts(km_wk.labels_)

visualizer = KElbowVisualizer(km_wk, k=(2,12), timings=False)
visualizer.fit(X_wk)      
visualizer.show() 
/Users/khaleel/Desktop/COnda/anaconda3/lib/python3.8/site-packages/yellowbrick/utils/kneed.py:140: YellowbrickWarning: No 'knee' or 'elbow point' detected This could be due to bad clustering, no actual clusters being formed etc.
  warnings.warn(warning_message, YellowbrickWarning)
/Users/khaleel/Desktop/COnda/anaconda3/lib/python3.8/site-packages/yellowbrick/cluster/elbow.py:343: YellowbrickWarning: No 'knee' or 'elbow' point detected, pass `locate_elbow=False` to remove the warning
  warnings.warn(warning_message, YellowbrickWarning)
Out[ ]:
<matplotlib.axes._subplots.AxesSubplot at 0x7fa8051fe190>
In [ ]:
km_wkn = KMeans().fit(X_wkn)
pd.Series.value_counts(km_wkn.labels_)

visualizer = KElbowVisualizer(km_wkn, k=(2,12), timings=False)
visualizer.fit(X_wkn)      
visualizer.show() 
Out[ ]:
<matplotlib.axes._subplots.AxesSubplot at 0x7fa805fe3c70>
In [ ]:
X_wk_v = dfGCwk.drop(columns = ['minute','hour','weekday'])
X_wk_v = X_wk_v.groupby(X_wk_v.Datetime).mean()
len(X_wk_v)
Out[ ]:
12480
In [ ]:
X_wkn_v = dfGCwkn.drop(columns = ['minute','hour','weekday'])
X_wkn_v = X_wkn_v.groupby(X_wkn_v.Datetime).mean()
len(X_wkn_v)
Out[ ]:
5040
In [ ]:
X_wk_v.head()
Out[ ]:
1 3 4 5 6 7 8 9 10 11 ... 291 292 293 294 295 296 297 298 299 300
Datetime
2012-07-02 00:00:00 0.309 1.044 0.094 0.058 0.152 0.079 0.079 0.026 0.753 0.291 ... 0.428 0.031 0.155 0.122 0.099 0.036 0.158 0.146 0.098 0.888
2012-07-02 00:30:00 0.082 0.992 0.091 0.093 0.392 0.044 0.135 0.022 0.350 0.318 ... 0.212 0.081 0.219 0.209 0.091 0.042 0.223 0.117 0.043 0.553
2012-07-02 01:00:00 0.059 0.448 0.087 0.065 0.329 0.078 0.078 0.012 0.052 0.304 ... 0.263 0.025 0.201 0.139 0.084 0.019 0.104 0.073 0.094 0.371
2012-07-02 01:30:00 0.097 0.071 0.080 0.081 0.318 0.033 0.137 0.036 0.016 0.315 ... 0.131 0.156 0.173 0.127 0.070 0.052 0.154 0.064 0.051 0.222
2012-07-02 02:00:00 0.290 0.069 0.093 0.079 0.312 0.075 0.081 0.012 0.046 0.334 ... 0.152 0.363 0.209 0.142 0.100 0.036 0.085 0.067 0.087 0.099

5 rows × 299 columns

In [ ]:
v_scores = []

N_Clusters = [2, 3, 4, 5, 6, 7, 8]

# Fit one K-means model per k and score each clustering with the V-measure,
# using customer '3' as the reference labelling
kms_wk = {}
for k in N_Clusters:
    kms_wk[k] = KMeans(k).fit(X_wk_v)
    labels = kms_wk[k].predict(X_wk_v)
    v_scores.append(v_measure_score(X_wk_v['3'], labels))
In [ ]:
#Plotting a Bar Graph to compare the models 
plt.bar(N_Clusters, v_scores) 
plt.xlabel('Number of Clusters') 
plt.ylabel('V-Measure Score') 
plt.title('Comparison of different Clustering Models') 
plt.show() 
In [ ]:
for k in range(3, 8):
    print('k =', k, 'the score is', silhouette_score(X_wk_v, kms_wk[k].labels_, metric='euclidean'))
k = 3 the score is 0.14543612279052234
k = 4 the score is 0.026297425834688377
k = 5 the score is 0.03036688673222016
k = 6 the score is 0.03653599404203
k = 7 the score is 0.024553935809803088
In [ ]:
v_scores = []

N_Clusters = [2, 3, 4, 5, 6, 7, 8]

# Same comparison on the weekend data, using customer '1' as the reference labelling
kms_wkn = {}
for k in N_Clusters:
    kms_wkn[k] = KMeans(k).fit(X_wkn_v)
    labels = kms_wkn[k].predict(X_wkn_v)
    v_scores.append(v_measure_score(X_wkn_v['1'], labels))
In [ ]:
#Plotting a Bar Graph to compare the models 
plt.bar(N_Clusters, v_scores) 
plt.xlabel('Number of Clusters') 
plt.ylabel('V-Measure Score') 
plt.title('Comparison of different Clustering Models') 
plt.show() 
In [ ]:
for k in range(3, 8):
    print('k =', k, 'the score is', silhouette_score(X_wkn_v, kms_wkn[k].labels_, metric='euclidean'))
k = 3 the score is 0.05065174495435142
k = 4 the score is 0.05892454210624547
k = 5 the score is 0.053359891861663254
k = 6 the score is 0.03759696117911779
k = 7 the score is 0.03799151153769567
In [ ]:
%%time
km_3_wk = KMeans(3).fit(X_wk)
pd.Series.value_counts(km_3_wk.labels_)
CPU times: user 2.68 s, sys: 46.9 ms, total: 2.73 s
Wall time: 806 ms
Out[ ]:
2    121
0    120
1     58
dtype: int64
In [ ]:
plt.plot(X_wk.columns, km_3_wk.cluster_centers_[0], alpha=0.7, lw=1, color='blue', linestyle='dotted')
plt.tight_layout()
plt.xlabel('Timeline')
plt.ylabel('Energy Consumption (kW)')
plt.title('Cluster 0')
Out[ ]:
Text(0.5, 1.0, 'Cluster 0')
In [ ]:
plt.plot(X_wk.columns, km_3_wk.cluster_centers_[1], alpha=0.7, lw=1, color='green', linestyle='dotted')
plt.tight_layout()
plt.xlabel('Timeline')
plt.ylabel('Energy Consumption (kW)')
plt.title('Cluster 1')
Out[ ]:
Text(0.5, 1.0, 'Cluster 1')
In [ ]:
plt.plot(X_wk.columns, km_3_wk.cluster_centers_[2], alpha=0.7, lw=1, color='red', linestyle='dotted')
plt.tight_layout()
plt.xlabel('Timeline')
plt.ylabel('Energy Consumption (kW)')
plt.title('Cluster 2')
Out[ ]:
Text(0.5, 1.0, 'Cluster 2')
In [ ]:
plt.figure(figsize=(7,5))


plt.plot(X_wk.columns, km_3_wk.cluster_centers_[2], alpha=0.8, lw=1, color='red')
plt.plot(X_wk.columns, km_3_wk.cluster_centers_[1], alpha=0.8, lw=1, color='green')
plt.plot(X_wk.columns, km_3_wk.cluster_centers_[0], alpha=0.8, lw=1, color='blue')

plt.tight_layout()
plt.xlabel('Timeline')
plt.ylabel('Energy Consumption (kW)')
plt.title('All clusters overlaid')
Out[ ]:
Text(0.5, 1.0, 'All clusters overlaid')
In [ ]:
%%time
km_4_wkn = KMeans(4).fit(X_wkn)
pd.Series.value_counts(km_4_wkn.labels_)
CPU times: user 1.26 s, sys: 61.8 ms, total: 1.33 s
Wall time: 476 ms
Out[ ]:
3    152
2     89
1     56
0      2
dtype: int64
In [ ]:
plt.plot(X_wkn.columns, km_4_wkn.cluster_centers_[0], alpha=0.7, lw=1, color='blue', linestyle='dotted')
plt.tight_layout()
plt.xlabel('Timeline')
plt.ylabel('Energy Consumption (kW)')
plt.title('Cluster 0')
Out[ ]:
Text(0.5, 1.0, 'Cluster 0')
In [ ]:
plt.plot(X_wkn.columns, km_4_wkn.cluster_centers_[1], alpha=0.7, lw=1, color='green', linestyle='dotted')
plt.tight_layout()
plt.xlabel('Timeline')
plt.ylabel('Energy Consumption (kW)')
plt.title('Cluster 1')
Out[ ]:
Text(0.5, 1.0, 'Cluster 1')
In [ ]:
plt.plot(X_wkn.columns, km_4_wkn.cluster_centers_[2], alpha=0.7, lw=1, color='red', linestyle='dotted')
plt.tight_layout()
plt.xlabel('Timeline')
plt.ylabel('Energy Consumption (kW)')
plt.title('Cluster 2')
Out[ ]:
Text(0.5, 1.0, 'Cluster 2')
In [ ]:
plt.plot(X_wkn.columns, km_4_wkn.cluster_centers_[3], alpha=0.7, lw=1, color='purple')
plt.tight_layout()
plt.xlabel('Timeline')
plt.ylabel('Energy Consumption (kW)')
plt.title('Cluster 3')
Out[ ]:
Text(0.5, 1.0, 'Cluster 3')
In [ ]:
plt.figure(figsize=(10,5))
plt.plot(X_wkn.columns, km_4_wkn.cluster_centers_[0], alpha=0.9, lw=3, color='blue')
plt.plot(X_wkn.columns, km_4_wkn.cluster_centers_[2], alpha=0.9, lw=3, color='red')
plt.plot(X_wkn.columns, km_4_wkn.cluster_centers_[1], alpha=0.9, lw=3, color='green')
plt.plot(X_wkn.columns, km_4_wkn.cluster_centers_[3], alpha=0.9, lw=3, color='purple')

plt.tight_layout()
plt.xlabel('Timeline')
plt.ylabel('Energy Consumption (kW)')
plt.title('All clusters overlaid')
Out[ ]:
Text(0.5, 1.0, 'All clusters overlaid')
In [ ]:
actual_clus = km_3_wk.labels_
predicted_clus = km_3_wk.predict(X_wk)
In [ ]:
adjusted_rand_score(actual_clus, predicted_clus)
# Perfect labeling scores 1; random or independent labelings score near 0 (or negative).
Out[ ]:
1.0
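As a toy illustration (labels invented here, not from the data), the ARI is invariant to permuting cluster ids, which is why scoring a model's own labels against its predictions on the same data gives exactly 1:

In [ ]:
# The same partition under swapped label ids still scores a perfect 1.0
print(adjusted_rand_score([0, 0, 1, 1], [1, 1, 0, 0]))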
In [ ]:
# x-axis: hours of day, taken from the daily-pattern index
t_hod = dfGCwk_dpat.index
def plot_daily_cluster(cl, X, **ax_kwarg):
    '''plot clusters of classifier `cl` with data `X`'''
    labels = cl.labels_
    
    # samples, colored with their membership
    for k in np.unique(labels):
        color = 'C{}'.format(k) if k>=0 else 'black'
        plt.plot(t_hod, X.iloc[labels == k, :].T,
                 color=color, alpha=0.3, lw=0.5,
                 #drawstyle='steps-post',
                );

    # cluster centers
    for k in np.unique(labels):
        n_member = np.sum(labels == k)
        if k>=0:
            color = 'C{}'.format(k)
            plt.plot(t_hod, cl.cluster_centers_[k],
                     label='{} ({})'.format(k, n_member),
                     color=color, lw=3);

    ax = plt.gca()
    ax.set(
        xlim=(0, 24),
        xlabel='hour of day'
    )
    ax.xaxis.set_major_locator(loc_6h);
    ax.set(**ax_kwarg)
    plt.tight_layout()
    plt.legend()
In [ ]:
estimate_bandwidth(X_wk.values, quantile=0.5)
Out[ ]:
49.33280903977709
In [ ]:
estimate_bandwidth(X_wkn.values, quantile=0.5)
Out[ ]:
33.565369030704225
In [ ]:
# Mean-shift clustering (slow to compute)
In [ ]:
%%time
ms_wk = MeanShift(bandwidth=75, cluster_all=True).fit(X_wk)
pd.Series.value_counts(ms_wk.labels_)
Out[ ]:
0    297
2      1
1      1
dtype: int64
In [ ]:
#ms_wkn = MeanShift(bandwidth=8, cluster_all=True).fit(X_wkn)
#pd.Series.value_counts(ms_wkn.labels_)
In [ ]:
# Hierarchical clustering (slower than k-means)
In [ ]:
%%time
hc_wk = AgglomerativeClustering(n_clusters = 3, affinity = 'euclidean', linkage ='ward')
X_hc_wk=hc_wk.fit(X_wk)
pd.Series.value_counts(X_hc_wk.labels_)
CPU times: user 975 ms, sys: 13.8 ms, total: 989 ms
Wall time: 1.01 s
Out[ ]:
2    143
1    100
0     56
dtype: int64
In [ ]:
%%time
hc_wkn = AgglomerativeClustering(n_clusters = 4, affinity = 'euclidean', linkage ='ward')
X_hc_wkn = hc_wkn.fit(X_wkn)
pd.Series.value_counts(X_hc_wkn.labels_)
CPU times: user 368 ms, sys: 4.64 ms, total: 373 ms
Wall time: 375 ms
Out[ ]:
2    147
3     75
1     50
0     27
dtype: int64
In [ ]:
cls_list = km_3_wk.labels_
X_wk['cluster'] = cls_list
In [ ]:
cls_list = km_4_wkn.labels_
X_wkn['cluster'] = cls_list
In [ ]:
print(X_wk['cluster'].value_counts(), '\n',
X_wkn['cluster'].value_counts())
2    121
0    120
1     58
Name: cluster, dtype: int64 
 3    152
2     89
1     56
0      2
Name: cluster, dtype: int64

Partitioning the data according to clusters

In [ ]:
X_wk_C1 = X_wk[X_wk.cluster == 0]
X_wk_C2 = X_wk[X_wk.cluster == 1]
X_wk_C3 = X_wk[X_wk.cluster == 2]
#X_wk_C4 = X_wk[X_wk.cluster == 3]
#X_wk_C5 = X_wk[X_wk.cluster == 4]
In [ ]:
X_wkn_C1 = X_wkn[X_wkn.cluster == 0]
X_wkn_C2 = X_wkn[X_wkn.cluster == 1]
X_wkn_C3 = X_wkn[X_wkn.cluster == 2]
X_wkn_C4 = X_wkn[X_wkn.cluster == 3]
In [ ]:
X_wk_C1 = X_wk_C1.drop(columns = 'cluster')
X_wk_C1 = X_wk_C1.transpose()
X_wk_C1.head()
Out[ ]:
11 12 13 16 24 27 29 31 32 34 ... 273 274 275 280 281 286 287 288 289 290
Datetime
2012-07-02 00:00:00 0.291 0.176 0.160 0.329 0.137 0.847 0.100 0.275 0.056 0.250 ... 0.103 0.446 0.138 0.252 1.297 0.170 0.950 0.053 0.741 0.067
2012-07-02 00:30:00 0.318 0.323 0.159 0.345 0.126 0.506 0.098 0.294 0.063 0.156 ... 0.108 0.273 0.150 0.543 0.648 0.154 0.925 0.049 0.734 0.090
2012-07-02 01:00:00 0.304 0.223 0.131 0.366 0.127 0.265 0.050 0.291 0.064 0.113 ... 0.050 0.474 0.150 0.446 0.671 0.123 0.875 0.044 0.744 0.076
2012-07-02 01:30:00 0.315 0.271 0.158 0.347 0.133 0.249 0.064 0.265 0.063 0.131 ... 0.083 0.528 0.194 0.150 0.628 0.183 0.913 0.012 0.216 0.086
2012-07-02 02:00:00 0.334 0.134 0.160 0.329 0.129 0.286 0.082 0.310 0.064 0.131 ... 0.059 0.531 0.138 0.149 0.186 0.137 0.888 0.041 0.147 0.083

5 rows × 120 columns

In [ ]:
X_wk_C1 = X_wk_C1.values

# Optional: scale values to the range 0-1 (disabled)
#scaler = MinMaxScaler(feature_range=(0, 1))
#X_wk_C1 = scaler.fit_transform(X_wk_C1)

# Cap outliers at the 97th percentile
cap = np.percentile(X_wk_C1, 97)
X_wk_C1[X_wk_C1 > cap] = cap
In [ ]:
# 80/20 chronological train/test split
training_size = int(X_wk_C1.shape[0] * 0.80)
test_size = X_wk_C1.shape[0] - training_size

train, test = X_wk_C1[0:training_size], X_wk_C1[training_size:]
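With the 12480 half-hourly rows of this cluster, the split works out to 9984 training and 2496 test rows:

In [ ]:
print(training_size, test_size)   # 9984 2496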
In [ ]:
# Sliding window: convert an array of values into a dataset matrix
#def create_dataset(dataset, time_step=1):
#	dataX, dataY = [], []
#	for i in range(len(dataset)-time_step-1):
#		a = dataset[i:(i+time_step),]   ###i=0, 0,1,2,3---- #
#		dataX.append(a)
#		dataY.append(dataset[i + time_step,])
#	return np.array(dataX), np.array(dataY)

#trainX, trainY = create_dataset(train, time_step)
#testX,testY = create_dataset(test, time_step)
#valX, valY = create_dataset(val, time_step)
In [ ]:
# Build sliding windows: each X sample holds `input_interval` past steps, and each
# Y sample holds the `target_interval` steps starting `output_step_offset` steps later

def get_batches(data, input_interval, target_interval, output_step_offset):
  batched_data_x = []
  batched_data_y = []
  for i in range(input_interval, (len(data) - target_interval - output_step_offset)):
    batched_data_x.append(data[i-input_interval:i])
    batched_data_y.append(data[i+output_step_offset:i+target_interval+output_step_offset])
  batched_data_x, batched_data_y = np.array(batched_data_x), np.array(batched_data_y)
  return batched_data_x, batched_data_y
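A toy check of the window shapes on a hypothetical 10-step, single-feature series:

In [ ]:
# With input_interval=2, target_interval=2, output_step_offset=2 on 10 steps,
# get_batches yields 10 - 2 - 2 - 2 = 4 window pairs, each of shape (2, 1)
toy = np.arange(10).reshape(-1, 1)
x, y = get_batches(toy, 2, 2, 2)
print(x.shape, y.shape)   # (4, 2, 1) (4, 2, 1)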
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)
In [ ]:
print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)
(9840, 48, 120) (9840, 48, 120)
(2352, 48, 120) (2352, 48, 120)
In [ ]:
# Building a sequential (dense) network
Model_1 = models.Sequential()
Model_1.add(layers.Dense(400, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_1.add(Dropout(.01))
Model_1.add(BatchNormalization())

Model_1.add(Dense(200))
Model_1.add(Dropout(.01))
Model_1.add(BatchNormalization())

Model_1.add((Dense(trainX.shape[2])))
Model_1.compile(optimizer=optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_1.summary()
Model: "sequential_1"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_3 (Dense)              (None, 48, 400)           48400     
_________________________________________________________________
dropout_2 (Dropout)          (None, 48, 400)           0         
_________________________________________________________________
batch_normalization_2 (Batch (None, 48, 400)           1600      
_________________________________________________________________
dense_4 (Dense)              (None, 48, 200)           80200     
_________________________________________________________________
dropout_3 (Dropout)          (None, 48, 200)           0         
_________________________________________________________________
batch_normalization_3 (Batch (None, 48, 200)           800       
_________________________________________________________________
dense_5 (Dense)              (None, 48, 120)           24120     
=================================================================
Total params: 155,120
Trainable params: 153,920
Non-trainable params: 1,200
_________________________________________________________________
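A quick check on the parameter counts above: a Dense layer has inputs × units + units parameters, and BatchNormalization carries 4 per feature (gamma, beta, and the moving mean/variance):

In [ ]:
print(120 * 400 + 400)   # 48400, matching dense_3
print(4 * 400)           # 1600, matching batch_normalization_2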
In [ ]:
model_train = Model_1.fit(trainX,trainY, epochs=30, validation_split = 0.10, batch_size=64)
Epoch 1/30
139/139 [==============================] - 9s 66ms/step - loss: 0.1586 - mae: 0.2596 - val_loss: 0.0473 - val_mae: 0.1604
Epoch 2/30
139/139 [==============================] - 9s 62ms/step - loss: 0.0587 - mae: 0.1669 - val_loss: 0.0432 - val_mae: 0.1416
Epoch 3/30
139/139 [==============================] - 8s 60ms/step - loss: 0.0558 - mae: 0.1619 - val_loss: 0.0416 - val_mae: 0.1317
Epoch 4/30
139/139 [==============================] - 9s 64ms/step - loss: 0.0537 - mae: 0.1583 - val_loss: 0.0417 - val_mae: 0.1325
Epoch 5/30
139/139 [==============================] - 9s 67ms/step - loss: 0.0521 - mae: 0.1555 - val_loss: 0.0416 - val_mae: 0.1320
Epoch 6/30
139/139 [==============================] - 9s 65ms/step - loss: 0.0505 - mae: 0.1528 - val_loss: 0.0415 - val_mae: 0.1314
Epoch 7/30
139/139 [==============================] - 9s 67ms/step - loss: 0.0491 - mae: 0.1505 - val_loss: 0.0416 - val_mae: 0.1303
Epoch 8/30
139/139 [==============================] - 9s 66ms/step - loss: 0.0477 - mae: 0.1480 - val_loss: 0.0416 - val_mae: 0.1304
Epoch 9/30
139/139 [==============================] - 9s 67ms/step - loss: 0.0466 - mae: 0.1459 - val_loss: 0.0417 - val_mae: 0.1312
Epoch 10/30
139/139 [==============================] - 9s 65ms/step - loss: 0.0456 - mae: 0.1441 - val_loss: 0.0419 - val_mae: 0.1301
Epoch 11/30
139/139 [==============================] - 9s 66ms/step - loss: 0.0445 - mae: 0.1424 - val_loss: 0.0420 - val_mae: 0.1313
Epoch 12/30
139/139 [==============================] - 9s 65ms/step - loss: 0.0437 - mae: 0.1410 - val_loss: 0.0420 - val_mae: 0.1319
Epoch 13/30
139/139 [==============================] - 9s 65ms/step - loss: 0.0427 - mae: 0.1393 - val_loss: 0.0422 - val_mae: 0.1328
Epoch 14/30
139/139 [==============================] - 9s 66ms/step - loss: 0.0418 - mae: 0.1378 - val_loss: 0.0422 - val_mae: 0.1317
Epoch 15/30
139/139 [==============================] - 9s 65ms/step - loss: 0.0410 - mae: 0.1366 - val_loss: 0.0426 - val_mae: 0.1335
Epoch 16/30
139/139 [==============================] - 9s 66ms/step - loss: 0.0402 - mae: 0.1353 - val_loss: 0.0428 - val_mae: 0.1320
Epoch 17/30
139/139 [==============================] - 9s 64ms/step - loss: 0.0395 - mae: 0.1343 - val_loss: 0.0431 - val_mae: 0.1336
Epoch 18/30
139/139 [==============================] - 10s 69ms/step - loss: 0.0387 - mae: 0.1331 - val_loss: 0.0433 - val_mae: 0.1340
Epoch 19/30
139/139 [==============================] - 10s 70ms/step - loss: 0.0380 - mae: 0.1320 - val_loss: 0.0437 - val_mae: 0.1339
Epoch 20/30
139/139 [==============================] - 9s 68ms/step - loss: 0.0374 - mae: 0.1311 - val_loss: 0.0439 - val_mae: 0.1349
Epoch 21/30
139/139 [==============================] - 9s 67ms/step - loss: 0.0368 - mae: 0.1301 - val_loss: 0.0442 - val_mae: 0.1361
Epoch 22/30
139/139 [==============================] - 9s 66ms/step - loss: 0.0363 - mae: 0.1293 - val_loss: 0.0444 - val_mae: 0.1367
Epoch 23/30
139/139 [==============================] - 9s 67ms/step - loss: 0.0359 - mae: 0.1287 - val_loss: 0.0446 - val_mae: 0.1372
Epoch 24/30
139/139 [==============================] - 9s 67ms/step - loss: 0.0354 - mae: 0.1279 - val_loss: 0.0448 - val_mae: 0.1376
Epoch 25/30
139/139 [==============================] - 10s 71ms/step - loss: 0.0349 - mae: 0.1272 - val_loss: 0.0453 - val_mae: 0.1375
Epoch 26/30
139/139 [==============================] - 10s 68ms/step - loss: 0.0345 - mae: 0.1265 - val_loss: 0.0454 - val_mae: 0.1371
Epoch 27/30
139/139 [==============================] - 9s 65ms/step - loss: 0.0342 - mae: 0.1260 - val_loss: 0.0457 - val_mae: 0.1386
Epoch 28/30
139/139 [==============================] - 9s 65ms/step - loss: 0.0339 - mae: 0.1255 - val_loss: 0.0458 - val_mae: 0.1393
Epoch 29/30
139/139 [==============================] - 9s 63ms/step - loss: 0.0337 - mae: 0.1252 - val_loss: 0.0460 - val_mae: 0.1406
Epoch 30/30
139/139 [==============================] - 9s 62ms/step - loss: 0.0334 - mae: 0.1247 - val_loss: 0.0462 - val_mae: 0.1403
In [ ]:
Seq_train = Model_1.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_1.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
308/308 [==============================] - 2s 7ms/step
[[ 0.14481649  0.14239839  0.23138762 ...  0.03541338  0.7538085
  -0.03572141]
 [ 0.18548644  0.17005312  0.24631096 ...  0.05665206  0.70213217
   0.08747752]
 [ 0.17229593  0.16830881  0.16172546 ...  0.08683593  0.70317113
   0.18256475]
 ...
 [ 0.7036396   0.25997296  0.9662459  ...  0.6298128   0.45992577
   0.11812274]
 [ 0.51161605  0.20916735  0.7805147  ...  0.25114092  0.40067142
   0.13489501]
 [ 0.44679612  0.23655745  0.61738276 ...  0.15329945  0.7234678
   0.21475682]] (9840, 48, 120)
74/74 [==============================] - 1s 7ms/step
[[0.26902968 0.26319808 0.14790496 ... 0.08369416 0.7706778  0.04181391]
 [0.18772104 0.2336326  0.06947066 ... 0.05983894 0.7987759  0.17523266]
 [0.24980417 0.17504488 0.03526912 ... 0.03360352 0.74350643 0.1043799 ]
 ...
 [0.46321374 0.36587137 0.19859348 ... 0.18075788 0.33623672 0.2301347 ]
 [0.31461546 0.32559675 0.08119497 ... 0.1680882  0.3933826  0.21454489]
 [0.21066889 0.23321235 0.26615083 ... 0.14585021 0.3647926  0.18270029]] (2352, 48, 120)
In [ ]:
def rmse(actual, pred):
    # Root-mean-square error over all elements
    return np.sqrt(((pred - actual) ** 2).mean())
def mae(actual, pred):
    # Mean absolute error over all elements
    return np.mean(np.abs(actual - pred))
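A sanity check on toy arrays (hypothetical values) that `rmse` agrees with the square root of sklearn's `mean_squared_error`:

In [ ]:
a = np.array([0.1, 0.2, 0.3])
p = np.array([0.2, 0.2, 0.1])
print(rmse(a, p), math.sqrt(mean_squared_error(a, p)))   # both ~0.129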
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))
Test Score: 0.28 RMSE
In [ ]:
#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.19 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
# Exponential learning-rate decay: lr = 0.001 * 0.8^epoch
lr_decay = callbacks.LearningRateScheduler(schedule=lambda epoch: 0.001 * (0.80 ** epoch))
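The schedule shrinks the learning rate by 20% per epoch; the first few values, computed directly:

In [ ]:
print([round(0.001 * 0.80 ** e, 6) for e in range(4)])   # [0.001, 0.0008, 0.00064, 0.000512]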
In [ ]:
model = Sequential()
model.add(LSTM(200, activation='relu', input_shape=(trainX.shape[1], trainX.shape[2]), return_sequences=True))
#model.add(Dropout(.2))

#model.add(LSTM(100, activation='relu', return_sequences=True))
#model.add(Dropout(.2))

model.add(Dense(trainX.shape[2]))
# Alternatives tried: optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9), optimizers.Adam(lr=0.01)
model.compile(optimizer=optimizers.Adam(lr=0.001), metrics=['mae'], loss='mse')
model.summary()
Model: "sequential_3"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_1 (LSTM)                (None, 48, 200)           256800    
_________________________________________________________________
dense_7 (Dense)              (None, 48, 120)           24120     
=================================================================
Total params: 280,920
Trainable params: 280,920
Non-trainable params: 0
_________________________________________________________________
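The LSTM parameter count above follows from its four gates: 4 × ((inputs + units) × units + units):

In [ ]:
print(4 * ((120 + 200) * 200 + 200))   # 256800, matching lstm_1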
In [ ]:
# fit model
history_A_C1 = model.fit(trainX, trainY, 
                         epochs=30, 
                         validation_split = 0.05, 
                         batch_size = 64, 
                         callbacks=[lr_decay])
Epoch 1/30
147/147 [==============================] - 18s 122ms/step - loss: 0.0604 - mae: 0.1653 - val_loss: 0.0402 - val_mae: 0.1323
Epoch 2/30
147/147 [==============================] - 17s 114ms/step - loss: 0.0483 - mae: 0.1458 - val_loss: 0.0393 - val_mae: 0.1323
Epoch 3/30
147/147 [==============================] - 15s 101ms/step - loss: 0.0461 - mae: 0.1415 - val_loss: 0.0389 - val_mae: 0.1283
Epoch 4/30
147/147 [==============================] - 15s 103ms/step - loss: 0.0442 - mae: 0.1383 - val_loss: 0.0385 - val_mae: 0.1260
Epoch 5/30
147/147 [==============================] - 14s 96ms/step - loss: 0.0430 - mae: 0.1363 - val_loss: 0.0385 - val_mae: 0.1259
Epoch 6/30
147/147 [==============================] - 15s 102ms/step - loss: 0.0422 - mae: 0.1349 - val_loss: 0.0384 - val_mae: 0.1268
Epoch 7/30
147/147 [==============================] - 16s 106ms/step - loss: 0.0416 - mae: 0.1339 - val_loss: 0.0386 - val_mae: 0.1265
Epoch 8/30
147/147 [==============================] - 15s 101ms/step - loss: 0.0411 - mae: 0.1331 - val_loss: 0.0385 - val_mae: 0.1262
Epoch 9/30
147/147 [==============================] - 16s 107ms/step - loss: 0.0407 - mae: 0.1325 - val_loss: 0.0385 - val_mae: 0.1255
Epoch 10/30
147/147 [==============================] - 14s 98ms/step - loss: 0.0405 - mae: 0.1320 - val_loss: 0.0385 - val_mae: 0.1260
Epoch 11/30
147/147 [==============================] - 14s 98ms/step - loss: 0.0402 - mae: 0.1316 - val_loss: 0.0386 - val_mae: 0.1256
Epoch 12/30
147/147 [==============================] - 15s 101ms/step - loss: 0.0400 - mae: 0.1313 - val_loss: 0.0386 - val_mae: 0.1262
Epoch 13/30
147/147 [==============================] - 15s 101ms/step - loss: 0.0399 - mae: 0.1311 - val_loss: 0.0386 - val_mae: 0.1259
Epoch 14/30
147/147 [==============================] - 18s 122ms/step - loss: 0.0398 - mae: 0.1309 - val_loss: 0.0386 - val_mae: 0.1258
Epoch 15/30
147/147 [==============================] - 17s 114ms/step - loss: 0.0397 - mae: 0.1307 - val_loss: 0.0387 - val_mae: 0.1260
Epoch 16/30
147/147 [==============================] - 16s 106ms/step - loss: 0.0396 - mae: 0.1306 - val_loss: 0.0386 - val_mae: 0.1262
Epoch 17/30
147/147 [==============================] - 14s 96ms/step - loss: 0.0395 - mae: 0.1305 - val_loss: 0.0387 - val_mae: 0.1264
Epoch 18/30
147/147 [==============================] - 15s 101ms/step - loss: 0.0395 - mae: 0.1304 - val_loss: 0.0387 - val_mae: 0.1263
Epoch 19/30
147/147 [==============================] - 15s 103ms/step - loss: 0.0394 - mae: 0.1303 - val_loss: 0.0387 - val_mae: 0.1262
Epoch 20/30
147/147 [==============================] - 16s 107ms/step - loss: 0.0394 - mae: 0.1303 - val_loss: 0.0387 - val_mae: 0.1262
Epoch 21/30
147/147 [==============================] - 17s 118ms/step - loss: 0.0393 - mae: 0.1302 - val_loss: 0.0387 - val_mae: 0.1263
Epoch 22/30
147/147 [==============================] - 14s 98ms/step - loss: 0.0393 - mae: 0.1302 - val_loss: 0.0387 - val_mae: 0.1263
Epoch 23/30
147/147 [==============================] - 14s 98ms/step - loss: 0.0393 - mae: 0.1301 - val_loss: 0.0387 - val_mae: 0.1262
Epoch 24/30
147/147 [==============================] - 13s 88ms/step - loss: 0.0393 - mae: 0.1301 - val_loss: 0.0387 - val_mae: 0.1264
Epoch 25/30
147/147 [==============================] - 15s 103ms/step - loss: 0.0393 - mae: 0.1301 - val_loss: 0.0387 - val_mae: 0.1262
Epoch 26/30
147/147 [==============================] - 18s 121ms/step - loss: 0.0393 - mae: 0.1300 - val_loss: 0.0387 - val_mae: 0.1263
Epoch 27/30
147/147 [==============================] - 16s 106ms/step - loss: 0.0393 - mae: 0.1301 - val_loss: 0.0387 - val_mae: 0.1262
Epoch 28/30
147/147 [==============================] - 15s 103ms/step - loss: 0.0392 - mae: 0.1300 - val_loss: 0.0387 - val_mae: 0.1263
Epoch 29/30
147/147 [==============================] - 16s 109ms/step - loss: 0.0392 - mae: 0.1300 - val_loss: 0.0387 - val_mae: 0.1262
Epoch 30/30
147/147 [==============================] - 16s 109ms/step - loss: 0.0392 - mae: 0.1300 - val_loss: 0.0387 - val_mae: 0.1263
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_A_C1.history['loss'], label='train')
plt.plot(history_A_C1.history['val_loss'], label='val')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C1 = model.predict(trainX, verbose = 1)
print(testingtrain_C1[0], testingtrain_C1.shape)

testingtest_C1 = model.predict(testX, verbose = 1)
print(testingtest_C1[0], testingtest_C1.shape)
308/308 [==============================] - 7s 22ms/step
[[0.23641679 0.20330316 0.1703359  ... 0.22079186 0.4644683  0.10141306]
 [0.22986552 0.17734039 0.22842494 ... 0.22847004 0.5028927  0.07368913]
 [0.21136482 0.17883044 0.24340828 ... 0.22056355 0.4479086  0.01676399]
 ...
 [0.7245015  0.2983994  0.77408594 ... 0.5288191  0.22720687 0.0913708 ]
 [0.5959039  0.24902365 0.6702227  ... 0.29246244 0.38909623 0.07780965]
 [0.43107724 0.24433887 0.5930871  ... 0.1694843  0.726064   0.11084637]] (9840, 48, 120)
74/74 [==============================] - 2s 24ms/step
[[0.17690131 0.19552088 0.09121437 ... 0.11379929 0.45082328 0.17128283]
 [0.20706119 0.22257465 0.10521824 ... 0.12695606 0.63123184 0.23119986]
 [0.21638268 0.21442798 0.10266185 ... 0.14707524 0.75401264 0.13406564]
 ...
 [0.45188197 0.22279942 0.13838255 ... 0.1066815  0.42110798 0.23478976]
 [0.32335514 0.20990211 0.10412066 ... 0.08301444 0.50834006 0.18038957]
 [0.26638868 0.22386307 0.10978004 ... 0.06584243 0.5639781  0.1746386 ]] (2352, 48, 120)
In [ ]:
trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain_C1[:,1,:]))
print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C1[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))
Train Score: 0.21 RMSE
Test Score: 0.26 RMSE
In [ ]:
trainMAE = np.mean(mae(trainY[:,1,:], testingtrain_C1[:,1,:]))
print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C1[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Train Score: 0.14 MAE
Test Score: 0.18 MAE
In [ ]:
plt.imshow(trainY[:47,47,:])
plt.show()
plt.imshow(testingtrain_C1[:47,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,:1], marker='.', label="actual")
plt.plot(aa, testingtest_C1[:,1,:1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
X_wk_C2 = X_wk_C2.drop(columns = 'cluster')
X_wk_C2 = X_wk_C2.transpose()
X_wk_C2.head()
Out[ ]:
6 8 17 20 21 25 26 33 35 46 ... 254 255 256 267 276 279 293 294 299 300
Datetime
2012-07-02 00:00:00 0.152 0.079 0.139 2.350 0.185 0.150 0.838 0.291 0.340 0.093 ... 0.693 1.033 0.151 1.306 0.094 0.069 0.155 0.122 0.098 0.888
2012-07-02 00:30:00 0.392 0.135 0.160 2.300 0.168 0.063 0.838 0.286 0.239 0.165 ... 0.784 0.977 0.187 1.328 0.069 0.106 0.219 0.209 0.043 0.553
2012-07-02 01:00:00 0.329 0.078 0.121 2.325 0.162 0.050 0.863 0.285 0.218 0.090 ... 0.750 1.040 0.145 1.170 0.101 0.081 0.201 0.139 0.094 0.371
2012-07-02 01:30:00 0.318 0.137 0.121 1.925 0.182 0.088 0.850 0.297 0.235 0.090 ... 0.700 0.995 0.174 1.168 0.086 0.100 0.173 0.127 0.051 0.222
2012-07-02 02:00:00 0.312 0.081 0.149 0.113 0.177 0.050 0.850 0.279 0.257 0.139 ... 0.747 0.934 0.139 1.186 0.082 0.069 0.209 0.142 0.087 0.099

5 rows × 58 columns

In [ ]:
X_wk_C2 = X_wk_C2.values
########### Ranging the values from 0 to 1
#scaler = MinMaxScaler(feature_range=(0, 1))
#Xtrain = scaler.fit(Xtrain.reshape(0, 1))
#X_wk_C1 = scaler.fit_transform(X_wk_C1)
#####Avoiding Outliers
cap = np.percentile(X_wk_C2, 97)   
X_wk_C2[X_wk_C2 > cap] = cap
In [ ]:
# 80/20 chronological train/test split
training_size = int(X_wk_C2.shape[0] * 0.80)
test_size = X_wk_C2.shape[0] - training_size

train, test = X_wk_C2[0:training_size], X_wk_C2[training_size:]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)
In [ ]:
print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)
(9840, 48, 58) (9840, 48, 58)
(2352, 48, 58) (2352, 48, 58)
In [ ]:
# Building a second sequential (dense) network
Model_2 = models.Sequential()
Model_2.add(layers.Dense(300, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_2.add(Dropout(0.2))
Model_2.add(BatchNormalization())

Model_2.add(layers.Dense(100, activation='relu'))
Model_2.add(Dropout(0.2))
Model_2.add(BatchNormalization())

Model_2.add((Dense(trainX.shape[2])))
Model_2.compile(optimizer=optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_2.summary()
Model: "sequential_6"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_14 (Dense)             (None, 48, 300)           17700     
_________________________________________________________________
dropout_9 (Dropout)          (None, 48, 300)           0         
_________________________________________________________________
batch_normalization_8 (Batch (None, 48, 300)           1200      
_________________________________________________________________
dense_15 (Dense)             (None, 48, 100)           30100     
_________________________________________________________________
dropout_10 (Dropout)         (None, 48, 100)           0         
_________________________________________________________________
batch_normalization_9 (Batch (None, 48, 100)           400       
_________________________________________________________________
dense_16 (Dense)             (None, 48, 58)            5858      
=================================================================
Total params: 55,258
Trainable params: 54,458
Non-trainable params: 800
_________________________________________________________________
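A note on the shapes above: a Keras Dense layer applied to a 3-D input acts on the last axis only, so the same weights are applied independently at each of the 48 time steps (equivalent to wrapping the layer in TimeDistributed). A standalone sketch confirming the shape behaviour (toy model, not the thesis model):

In [ ]:
# Sketch: Dense on (batch, steps, features) yields (batch, steps, units),
# sharing one weight matrix across all time steps.
toy_model = models.Sequential()
toy_model.add(layers.Dense(5, input_shape=(48, 58)))
print(toy_model.output_shape)  # (None, 48, 5)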
In [ ]:
model_train = Model_2.fit(trainX,trainY, epochs=30, validation_split = 0.10, batch_size=64)
Epoch 1/30
139/139 [==============================] - 5s 39ms/step - loss: 0.5391 - mae: 0.5266 - val_loss: 0.1154 - val_mae: 0.2515
Epoch 2/30
139/139 [==============================] - 6s 40ms/step - loss: 0.1812 - mae: 0.2973 - val_loss: 0.1059 - val_mae: 0.2268
Epoch 3/30
139/139 [==============================] - 6s 40ms/step - loss: 0.1390 - mae: 0.2587 - val_loss: 0.1000 - val_mae: 0.2128
Epoch 4/30
139/139 [==============================] - 6s 40ms/step - loss: 0.1250 - mae: 0.2439 - val_loss: 0.0977 - val_mae: 0.2067
Epoch 5/30
139/139 [==============================] - 6s 41ms/step - loss: 0.1190 - mae: 0.2373 - val_loss: 0.0970 - val_mae: 0.2058
Epoch 6/30
139/139 [==============================] - 5s 39ms/step - loss: 0.1149 - mae: 0.2326 - val_loss: 0.0958 - val_mae: 0.2032
Epoch 7/30
139/139 [==============================] - 5s 38ms/step - loss: 0.1119 - mae: 0.2292 - val_loss: 0.0952 - val_mae: 0.2024
Epoch 8/30
139/139 [==============================] - 5s 38ms/step - loss: 0.1092 - mae: 0.2263 - val_loss: 0.0953 - val_mae: 0.2025
Epoch 9/30
139/139 [==============================] - 6s 41ms/step - loss: 0.1071 - mae: 0.2238 - val_loss: 0.0951 - val_mae: 0.2024
Epoch 10/30
139/139 [==============================] - 6s 43ms/step - loss: 0.1052 - mae: 0.2217 - val_loss: 0.0945 - val_mae: 0.2027
Epoch 11/30
139/139 [==============================] - 5s 38ms/step - loss: 0.1036 - mae: 0.2199 - val_loss: 0.0953 - val_mae: 0.2012
Epoch 12/30
139/139 [==============================] - 6s 42ms/step - loss: 0.1022 - mae: 0.2184 - val_loss: 0.0945 - val_mae: 0.2018
Epoch 13/30
139/139 [==============================] - 5s 39ms/step - loss: 0.1009 - mae: 0.2170 - val_loss: 0.0941 - val_mae: 0.2002
Epoch 14/30
139/139 [==============================] - 6s 40ms/step - loss: 0.0999 - mae: 0.2160 - val_loss: 0.0945 - val_mae: 0.1998
Epoch 15/30
139/139 [==============================] - 5s 38ms/step - loss: 0.0987 - mae: 0.2146 - val_loss: 0.0938 - val_mae: 0.2014
Epoch 16/30
139/139 [==============================] - 6s 41ms/step - loss: 0.0976 - mae: 0.2137 - val_loss: 0.0948 - val_mae: 0.2002
Epoch 17/30
139/139 [==============================] - 5s 38ms/step - loss: 0.0968 - mae: 0.2128 - val_loss: 0.0945 - val_mae: 0.1993
Epoch 18/30
139/139 [==============================] - 5s 35ms/step - loss: 0.0955 - mae: 0.2115 - val_loss: 0.0949 - val_mae: 0.1997
Epoch 19/30
139/139 [==============================] - 5s 35ms/step - loss: 0.0949 - mae: 0.2109 - val_loss: 0.0937 - val_mae: 0.2003
Epoch 20/30
139/139 [==============================] - 6s 40ms/step - loss: 0.0941 - mae: 0.2101 - val_loss: 0.0945 - val_mae: 0.2004
Epoch 21/30
139/139 [==============================] - 5s 40ms/step - loss: 0.0935 - mae: 0.2095 - val_loss: 0.0951 - val_mae: 0.2033
Epoch 22/30
139/139 [==============================] - 6s 41ms/step - loss: 0.0929 - mae: 0.2090 - val_loss: 0.0950 - val_mae: 0.2035
Epoch 23/30
139/139 [==============================] - 5s 39ms/step - loss: 0.0925 - mae: 0.2084 - val_loss: 0.0949 - val_mae: 0.2011
Epoch 24/30
139/139 [==============================] - 6s 41ms/step - loss: 0.0918 - mae: 0.2076 - val_loss: 0.0951 - val_mae: 0.2055
Epoch 25/30
139/139 [==============================] - 6s 44ms/step - loss: 0.0914 - mae: 0.2073 - val_loss: 0.0945 - val_mae: 0.2030
Epoch 26/30
139/139 [==============================] - 6s 42ms/step - loss: 0.0908 - mae: 0.2066 - val_loss: 0.0952 - val_mae: 0.2038
Epoch 27/30
139/139 [==============================] - 6s 40ms/step - loss: 0.0904 - mae: 0.2061 - val_loss: 0.0956 - val_mae: 0.2014
Epoch 28/30
139/139 [==============================] - 6s 40ms/step - loss: 0.0900 - mae: 0.2057 - val_loss: 0.0946 - val_mae: 0.1997
Epoch 29/30
139/139 [==============================] - 6s 44ms/step - loss: 0.0896 - mae: 0.2052 - val_loss: 0.0952 - val_mae: 0.2006
Epoch 30/30
139/139 [==============================] - 6s 43ms/step - loss: 0.0894 - mae: 0.2050 - val_loss: 0.0952 - val_mae: 0.1993
In [ ]:
Seq_train = Model_2.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_2.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
308/308 [==============================] - 1s 4ms/step
[[0.4676332  0.2104396  0.2007122  ... 0.26167086 0.46753728 0.43467042]
 [0.5069925  0.18020485 0.2044988  ... 0.21563022 0.41773283 0.2833166 ]
 [0.44087237 0.1876678  0.13894345 ... 0.178669   0.35156134 0.21537524]
 ...
 [0.77466774 0.3615078  0.395022   ... 0.38779205 0.60906994 0.57448494]
 [0.66462106 0.10011488 0.2457135  ... 0.30903685 0.40432632 0.88692725]
 [0.6127568  0.12182742 0.35974595 ... 0.24418929 0.4384357  0.787462  ]] (9840, 48, 58)
74/74 [==============================] - 0s 4ms/step
[[0.2558316  0.20079629 0.28153646 ... 0.44564265 0.26198608 0.531197  ]
 [0.31228393 0.2439728  0.29213658 ... 0.46719843 0.25329962 0.33736098]
 [0.30150196 0.2192012  0.26355112 ... 0.57387996 0.24218297 0.31296864]
 ...
 [0.3274149  0.19263275 0.2840103  ... 0.37668148 0.2656976  0.7679692 ]
 [0.28331962 0.1754362  0.21239117 ... 0.35377237 0.27887732 1.1420795 ]
 [0.27009085 0.20502108 0.26177856 ... 0.3928157  0.22234194 0.87688667]] (2352, 48, 58)
In [ ]:
# Train-set scores omitted here. Note this cell scores only the first 48 test
# windows of a single meter, unlike the full-slice evaluations elsewhere.
testScore = math.sqrt(mean_squared_error(testY[:48,1,1], Seq_test[:48,1,1]))
print('Test Score: %.2f RMSE' % (testScore))

testMAE = np.mean(mae(testY[:48,1,1], Seq_test[:48,1,1]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.33 RMSE
Test Score: 0.21 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
lr_decay = callbacks.LearningRateScheduler(schedule=lambda epoch: 0.001 * (0.80 ** epoch))
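This schedule decays the learning rate geometrically: epoch e trains at 0.001 · 0.8^e, so by the last of the 30 epochs the rate is roughly 1.5e-6 and the weights are effectively frozen, which matches the flat tail of the training logs below. A quick sketch of the values:

In [ ]:
# Sketch: per-epoch learning rates produced by the scheduler above.
for epoch in (0, 1, 5, 10, 29):
    print(epoch, 0.001 * (0.80 ** epoch))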
In [ ]:
model = Sequential()
model.add(LSTM(70, activation='relu', input_shape=(trainX.shape[1], trainX.shape[2]), return_sequences=True))
model.add(Dropout(0.2))

model.add(Dense(trainX.shape[2]))
# Alternatives tried: optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9), optimizers.Adam(lr=0.01)
model.compile(optimizer='adam', metrics='mae', loss='mse')
model.summary()
Model: "sequential_11"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_6 (LSTM)                (None, 48, 70)            36120     
_________________________________________________________________
dropout_12 (Dropout)         (None, 48, 70)            0         
_________________________________________________________________
dense_21 (Dense)             (None, 48, 58)            4118      
=================================================================
Total params: 40,238
Trainable params: 40,238
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C2 = model.fit(trainX,trainY, epochs=30,
                       batch_size = 32,
                       validation_split = 0.10, callbacks=[lr_decay])
Epoch 1/30
277/277 [==============================] - 8s 31ms/step - loss: 0.1732 - mae: 0.2929 - val_loss: 0.1078 - val_mae: 0.2283
Epoch 2/30
277/277 [==============================] - 8s 28ms/step - loss: 0.6810 - mae: 0.2747 - val_loss: 0.1032 - val_mae: 0.2196
Epoch 3/30
277/277 [==============================] - 7s 26ms/step - loss: 0.1359 - mae: 0.2566 - val_loss: 0.1053 - val_mae: 0.2248
Epoch 4/30
277/277 [==============================] - 8s 29ms/step - loss: 0.1354 - mae: 0.2562 - val_loss: 0.1026 - val_mae: 0.2217
Epoch 5/30
277/277 [==============================] - 7s 25ms/step - loss: 0.1321 - mae: 0.2530 - val_loss: 0.1017 - val_mae: 0.2201
Epoch 6/30
277/277 [==============================] - 7s 27ms/step - loss: 0.1301 - mae: 0.2515 - val_loss: 0.1011 - val_mae: 0.2187
Epoch 7/30
277/277 [==============================] - 8s 30ms/step - loss: 0.1286 - mae: 0.2500 - val_loss: 0.1008 - val_mae: 0.2163
Epoch 8/30
277/277 [==============================] - 8s 29ms/step - loss: 0.1276 - mae: 0.2488 - val_loss: 0.1013 - val_mae: 0.2180
Epoch 9/30
277/277 [==============================] - 8s 28ms/step - loss: 0.1268 - mae: 0.2480 - val_loss: 0.1010 - val_mae: 0.2183
Epoch 10/30
277/277 [==============================] - 7s 26ms/step - loss: 0.1263 - mae: 0.2475 - val_loss: 0.1007 - val_mae: 0.2162
Epoch 11/30
277/277 [==============================] - 7s 25ms/step - loss: 0.1258 - mae: 0.2468 - val_loss: 0.1008 - val_mae: 0.2165
Epoch 12/30
277/277 [==============================] - 8s 29ms/step - loss: 0.1253 - mae: 0.2463 - val_loss: 0.1006 - val_mae: 0.2151
Epoch 13/30
277/277 [==============================] - 6s 23ms/step - loss: 0.1250 - mae: 0.2458 - val_loss: 0.1005 - val_mae: 0.2156
Epoch 14/30
277/277 [==============================] - 7s 24ms/step - loss: 0.1248 - mae: 0.2457 - val_loss: 0.1004 - val_mae: 0.2150
Epoch 15/30
277/277 [==============================] - 8s 28ms/step - loss: 0.1246 - mae: 0.2454 - val_loss: 0.1003 - val_mae: 0.2153
Epoch 16/30
277/277 [==============================] - 7s 24ms/step - loss: 0.1245 - mae: 0.2452 - val_loss: 0.1003 - val_mae: 0.2152
Epoch 17/30
277/277 [==============================] - 7s 24ms/step - loss: 0.1243 - mae: 0.2451 - val_loss: 0.1003 - val_mae: 0.2154
Epoch 18/30
277/277 [==============================] - 8s 30ms/step - loss: 0.1242 - mae: 0.2450 - val_loss: 0.1002 - val_mae: 0.2154
Epoch 19/30
277/277 [==============================] - 7s 25ms/step - loss: 0.1241 - mae: 0.2448 - val_loss: 0.1002 - val_mae: 0.2151
Epoch 20/30
277/277 [==============================] - 7s 26ms/step - loss: 0.1240 - mae: 0.2448 - val_loss: 0.1002 - val_mae: 0.2153
Epoch 21/30
277/277 [==============================] - 7s 24ms/step - loss: 0.1240 - mae: 0.2448 - val_loss: 0.1002 - val_mae: 0.2149
Epoch 22/30
277/277 [==============================] - 8s 28ms/step - loss: 0.1240 - mae: 0.2447 - val_loss: 0.1001 - val_mae: 0.2149
Epoch 23/30
277/277 [==============================] - 7s 25ms/step - loss: 0.1239 - mae: 0.2446 - val_loss: 0.1001 - val_mae: 0.2152
Epoch 24/30
277/277 [==============================] - 7s 27ms/step - loss: 0.1239 - mae: 0.2446 - val_loss: 0.1001 - val_mae: 0.2151
Epoch 25/30
277/277 [==============================] - 8s 29ms/step - loss: 0.1239 - mae: 0.2446 - val_loss: 0.1001 - val_mae: 0.2151
Epoch 26/30
277/277 [==============================] - 7s 25ms/step - loss: 0.1238 - mae: 0.2446 - val_loss: 0.1001 - val_mae: 0.2151
Epoch 27/30
277/277 [==============================] - 8s 30ms/step - loss: 0.1238 - mae: 0.2446 - val_loss: 0.1001 - val_mae: 0.2150
Epoch 28/30
277/277 [==============================] - 7s 27ms/step - loss: 0.1238 - mae: 0.2445 - val_loss: 0.1001 - val_mae: 0.2150
Epoch 29/30
277/277 [==============================] - 7s 25ms/step - loss: 0.1237 - mae: 0.2444 - val_loss: 0.1001 - val_mae: 0.2151
Epoch 30/30
277/277 [==============================] - 8s 29ms/step - loss: 0.1238 - mae: 0.2445 - val_loss: 0.1001 - val_mae: 0.2151
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C2.history['loss'], label='train')
plt.plot(history_C2.history['val_loss'], label='val')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C2 = model.predict(trainX, verbose = 1)
print(testingtrain_C2[0], testingtrain_C2.shape)

testingtest_C2 = model.predict(testX, verbose = 1)
print(testingtest_C2[0], testingtest_C2.shape)
308/308 [==============================] - 2s 8ms/step
[[0.4851358  0.35533905 0.3974049  ... 0.43887818 0.3852932  0.57986253]
 [0.48351657 0.23210445 0.26266518 ... 0.4237411  0.33704576 0.55047584]
 [0.4325887  0.1469125  0.17240575 ... 0.33983696 0.26174968 0.41128427]
 ...
 [0.85429317 0.50012934 0.7333682  ... 0.2565592  0.8652249  0.643064  ]
 [0.7364267  0.33889714 0.6093894  ... 0.22157678 0.68466055 0.68325347]
 [0.66885227 0.2710568  0.44733012 ... 0.23759858 0.5700684  0.6198025 ]] (9840, 48, 58)
74/74 [==============================] - 0s 6ms/step
[[0.32406333 0.28887883 0.3217318  ... 0.401958   0.28655213 0.48983258]
 [0.33180103 0.20391695 0.227333   ... 0.4385087  0.2852313  0.48693424]
 [0.3076753  0.21000502 0.23426394 ... 0.40994915 0.27251923 0.4028259 ]
 ...
 [0.43280143 0.41548163 0.540333   ... 0.46178925 0.41714492 0.7476385 ]
 [0.4190235  0.33061287 0.42207116 ... 0.4516393  0.34132722 0.8274625 ]
 [0.3596642  0.24519283 0.30230492 ... 0.43560535 0.2477414  0.72597826]] (2352, 48, 58)
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain_C2[:,1,:]))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C2[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))
Test Score: 0.40 RMSE
In [ ]:
#trainMAE = np.mean(mae(trainY[:,1,:], testingtrain_C2[:,1,:]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C2[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.31 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(testingtest_C2[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, testingtest_C2[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
X_wk_C3 = X_wk_C3.drop(columns = 'cluster')
X_wk_C3 = X_wk_C3.transpose()
X_wk_C3.head()
Out[ ]:
1 3 4 5 7 9 10 14 15 18 ... 282 283 284 285 291 292 295 296 297 298
Datetime
2012-07-02 00:00:00 0.309 1.044 0.094 0.058 0.079 0.026 0.753 0.190 0.061 0.064 ... 0.319 0.079 0.679 0.085 0.428 0.031 0.099 0.036 0.158 0.146
2012-07-02 00:30:00 0.082 0.992 0.091 0.093 0.044 0.022 0.350 0.206 0.053 0.076 ... 0.250 0.046 0.377 0.080 0.212 0.081 0.091 0.042 0.223 0.117
2012-07-02 01:00:00 0.059 0.448 0.087 0.065 0.078 0.012 0.052 0.200 0.064 0.075 ... 0.188 0.055 0.178 0.070 0.263 0.025 0.084 0.019 0.104 0.073
2012-07-02 01:30:00 0.097 0.071 0.080 0.081 0.033 0.036 0.016 0.183 0.065 0.059 ... 0.200 0.153 0.215 0.080 0.131 0.156 0.070 0.052 0.154 0.064
2012-07-02 02:00:00 0.290 0.069 0.093 0.079 0.075 0.012 0.046 0.214 0.044 0.061 ... 0.150 0.079 0.152 0.068 0.152 0.363 0.100 0.036 0.085 0.067

5 rows × 121 columns

In [ ]:
X_wk_C3 = X_wk_C3.values
# (optional, left disabled) scale values to [0, 1]:
#scaler = MinMaxScaler(feature_range=(0, 1))
#X_wk_C3 = scaler.fit_transform(X_wk_C3)
# Cap outliers at the 97th percentile
cap = np.percentile(X_wk_C3, 97)
X_wk_C3[X_wk_C3 > cap] = cap
In [ ]:
training_size=int(X_wk_C3.shape[0]*0.80)

test_size=(X_wk_C3.shape[0])-training_size

train,test=X_wk_C3[0:training_size],X_wk_C3[training_size:(X_wk_C3.shape[0])]
In [ ]:
# Same windowing helper as above, redefined for this section.
def get_batches(data, input_interval, target_interval, output_step_offset):
  batched_data_x = []
  batched_data_y = []
  for i in range(input_interval, (len(data) - target_interval - output_step_offset)):
    batched_data_x.append(data[i-input_interval:i])
    batched_data_y.append(data[i+output_step_offset:i+target_interval+output_step_offset])
  return np.array(batched_data_x), np.array(batched_data_y)
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)
In [ ]:
print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)
(9840, 48, 121) (9840, 48, 121)
(2352, 48, 121) (2352, 48, 121)
In [ ]:
# Build a sequential dense network
Model_3 = models.Sequential()
Model_3.add(layers.Dense(600, activation='relu', input_shape=(trainX.shape[1], trainX.shape[2])))
Model_3.add(Dropout(0.5))
Model_3.add(BatchNormalization())

Model_3.add(layers.Dense(300, activation='relu'))
Model_3.add(Dropout(0.5))
Model_3.add(BatchNormalization())

Model_3.add(Dense(trainX.shape[2]))
Model_3.compile(optimizer=optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_3.summary()
Model: "sequential_14"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_28 (Dense)             (None, 48, 600)           73200     
_________________________________________________________________
dropout_17 (Dropout)         (None, 48, 600)           0         
_________________________________________________________________
batch_normalization_14 (Batc (None, 48, 600)           2400      
_________________________________________________________________
dense_29 (Dense)             (None, 48, 300)           180300    
_________________________________________________________________
dropout_18 (Dropout)         (None, 48, 300)           0         
_________________________________________________________________
batch_normalization_15 (Batc (None, 48, 300)           1200      
_________________________________________________________________
dense_30 (Dense)             (None, 48, 121)           36421     
=================================================================
Total params: 293,521
Trainable params: 291,721
Non-trainable params: 1,800
_________________________________________________________________
In [ ]:
model_train = Model_3.fit(trainX,trainY, epochs=30, validation_split = 0.10, batch_size = 64)
Epoch 1/30
139/139 [==============================] - 14s 101ms/step - loss: 0.3369 - mae: 0.3333 - val_loss: 0.0202 - val_mae: 0.0935
Epoch 2/30
139/139 [==============================] - 13s 92ms/step - loss: 0.0334 - mae: 0.1091 - val_loss: 0.0186 - val_mae: 0.0883
Epoch 3/30
139/139 [==============================] - 12s 87ms/step - loss: 0.0224 - mae: 0.0949 - val_loss: 0.0174 - val_mae: 0.0849
Epoch 4/30
139/139 [==============================] - 13s 93ms/step - loss: 0.0214 - mae: 0.0933 - val_loss: 0.0168 - val_mae: 0.0810
Epoch 5/30
139/139 [==============================] - 12s 87ms/step - loss: 0.0210 - mae: 0.0924 - val_loss: 0.0166 - val_mae: 0.0816
Epoch 6/30
139/139 [==============================] - 13s 90ms/step - loss: 0.0206 - mae: 0.0917 - val_loss: 0.0164 - val_mae: 0.0793
Epoch 7/30
139/139 [==============================] - 12s 89ms/step - loss: 0.0204 - mae: 0.0913 - val_loss: 0.0163 - val_mae: 0.0785
Epoch 8/30
139/139 [==============================] - 12s 88ms/step - loss: 0.0201 - mae: 0.0908 - val_loss: 0.0162 - val_mae: 0.0797
Epoch 9/30
139/139 [==============================] - 13s 94ms/step - loss: 0.0199 - mae: 0.0903 - val_loss: 0.0164 - val_mae: 0.0773
Epoch 10/30
139/139 [==============================] - 12s 87ms/step - loss: 0.0196 - mae: 0.0897 - val_loss: 0.0162 - val_mae: 0.0779
Epoch 11/30
139/139 [==============================] - 12s 86ms/step - loss: 0.0195 - mae: 0.0894 - val_loss: 0.0162 - val_mae: 0.0768
Epoch 12/30
139/139 [==============================] - 12s 88ms/step - loss: 0.0193 - mae: 0.0891 - val_loss: 0.0162 - val_mae: 0.0768
Epoch 13/30
139/139 [==============================] - 12s 87ms/step - loss: 0.0192 - mae: 0.0886 - val_loss: 0.0162 - val_mae: 0.0763
Epoch 14/30
139/139 [==============================] - 14s 99ms/step - loss: 0.0191 - mae: 0.0885 - val_loss: 0.0162 - val_mae: 0.0770
Epoch 15/30
139/139 [==============================] - 13s 97ms/step - loss: 0.0189 - mae: 0.0881 - val_loss: 0.0161 - val_mae: 0.0767
Epoch 16/30
139/139 [==============================] - 13s 94ms/step - loss: 0.0188 - mae: 0.0878 - val_loss: 0.0162 - val_mae: 0.0786
Epoch 17/30
139/139 [==============================] - 13s 93ms/step - loss: 0.0187 - mae: 0.0876 - val_loss: 0.0162 - val_mae: 0.0774
Epoch 18/30
139/139 [==============================] - 14s 99ms/step - loss: 0.0186 - mae: 0.0873 - val_loss: 0.0161 - val_mae: 0.0769
Epoch 19/30
139/139 [==============================] - 13s 95ms/step - loss: 0.0185 - mae: 0.0870 - val_loss: 0.0159 - val_mae: 0.0764
Epoch 20/30
139/139 [==============================] - 14s 99ms/step - loss: 0.0184 - mae: 0.0868 - val_loss: 0.0161 - val_mae: 0.0768
Epoch 21/30
139/139 [==============================] - 13s 91ms/step - loss: 0.0183 - mae: 0.0866 - val_loss: 0.0161 - val_mae: 0.0769
Epoch 22/30
139/139 [==============================] - 12s 85ms/step - loss: 0.0182 - mae: 0.0864 - val_loss: 0.0160 - val_mae: 0.0764
Epoch 23/30
139/139 [==============================] - 12s 85ms/step - loss: 0.0181 - mae: 0.0862 - val_loss: 0.0160 - val_mae: 0.0767
Epoch 24/30
139/139 [==============================] - 13s 92ms/step - loss: 0.0180 - mae: 0.0860 - val_loss: 0.0161 - val_mae: 0.0769
Epoch 25/30
139/139 [==============================] - 12s 89ms/step - loss: 0.0180 - mae: 0.0859 - val_loss: 0.0160 - val_mae: 0.0768
Epoch 26/30
139/139 [==============================] - 14s 100ms/step - loss: 0.0179 - mae: 0.0856 - val_loss: 0.0161 - val_mae: 0.0766
Epoch 27/30
139/139 [==============================] - 14s 99ms/step - loss: 0.0178 - mae: 0.0855 - val_loss: 0.0161 - val_mae: 0.0777
Epoch 28/30
139/139 [==============================] - 14s 100ms/step - loss: 0.0177 - mae: 0.0853 - val_loss: 0.0161 - val_mae: 0.0774
Epoch 29/30
139/139 [==============================] - 14s 99ms/step - loss: 0.0176 - mae: 0.0851 - val_loss: 0.0161 - val_mae: 0.0762
Epoch 30/30
139/139 [==============================] - 14s 98ms/step - loss: 0.0175 - mae: 0.0850 - val_loss: 0.0161 - val_mae: 0.0764
In [ ]:
Seq_train = Model_3.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_3.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
308/308 [==============================] - 3s 10ms/step
[[0.27353722 0.22235137 0.11924484 ... 0.09030947 0.21714443 0.15713792]
 [0.27698985 0.20805877 0.12203798 ... 0.07588496 0.21778339 0.14389536]
 [0.30191922 0.19840896 0.09490469 ... 0.04296529 0.18183883 0.09257507]
 ...
 [0.59218717 0.35601154 0.12865077 ... 0.11208278 0.23783398 0.23265839]
 [0.32815158 0.27847004 0.11103028 ... 0.10393861 0.22936368 0.19641408]
 [0.29926986 0.26765394 0.10675882 ... 0.09249008 0.22138035 0.17555888]] (9840, 48, 121)
74/74 [==============================] - 1s 11ms/step
[[0.18086421 0.08478816 0.09965246 ... 0.04774038 0.20904157 0.09201072]
 [0.16954157 0.07572413 0.10235085 ... 0.04930423 0.2054002  0.08915348]
 [0.3474395  0.08292878 0.09282088 ... 0.02855679 0.19409963 0.06573026]
 ...
 [0.25839117 0.11097807 0.16000354 ... 0.10260759 0.24862647 0.17750622]
 [0.3127563  0.09244774 0.13501032 ... 0.06932951 0.23404184 0.14152505]
 [0.1829294  0.10213682 0.1284194  ... 0.06529852 0.23978904 0.14243035]] (2352, 48, 121)
In [ ]:
# Train-set scores omitted here; Seq_train could be scored the same way.
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.17 RMSE
Test Score: 0.11 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
lr_decay = callbacks.LearningRateScheduler(schedule=lambda epoch: 0.001 * (0.80 ** epoch))
In [ ]:
model = Sequential()
model.add(LSTM(200, activation='relu', input_shape=(trainX.shape[1], trainX.shape[2]), return_sequences=True))
# Dropout was tried here but left out of the final model

model.add(Dense(trainX.shape[2]))
# Alternatives tried: optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9), optimizers.Adam(lr=0.01)
model.compile(optimizer=optimizers.Adam(lr=0.001), metrics='mae', loss='mse')
model.summary()
Model: "sequential_15"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_7 (LSTM)                (None, 48, 200)           257600    
_________________________________________________________________
dense_31 (Dense)             (None, 48, 121)           24321     
=================================================================
Total params: 281,921
Trainable params: 281,921
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C3 = model.fit(trainX,trainY, epochs=30, batch_size = 64,
                       validation_split = 0.10, callbacks=[lr_decay])
Epoch 1/30
139/139 [==============================] - 14s 103ms/step - loss: 0.0241 - mae: 0.1011 - val_loss: 0.0170 - val_mae: 0.0829
Epoch 2/30
139/139 [==============================] - 14s 102ms/step - loss: 0.0193 - mae: 0.0892 - val_loss: 0.0165 - val_mae: 0.0819
Epoch 3/30
139/139 [==============================] - 12s 83ms/step - loss: 0.0182 - mae: 0.0863 - val_loss: 0.0164 - val_mae: 0.0809
Epoch 4/30
139/139 [==============================] - 14s 97ms/step - loss: 0.0176 - mae: 0.0848 - val_loss: 0.0164 - val_mae: 0.0815
Epoch 5/30
139/139 [==============================] - 15s 105ms/step - loss: 0.0172 - mae: 0.0838 - val_loss: 0.0165 - val_mae: 0.0811
Epoch 6/30
139/139 [==============================] - 14s 102ms/step - loss: 0.0169 - mae: 0.0830 - val_loss: 0.0165 - val_mae: 0.0813
Epoch 7/30
139/139 [==============================] - 14s 104ms/step - loss: 0.0167 - mae: 0.0825 - val_loss: 0.0165 - val_mae: 0.0814
Epoch 8/30
139/139 [==============================] - 17s 123ms/step - loss: 0.0165 - mae: 0.0820 - val_loss: 0.0164 - val_mae: 0.0811
Epoch 9/30
139/139 [==============================] - 13s 92ms/step - loss: 0.0164 - mae: 0.0817 - val_loss: 0.0165 - val_mae: 0.0808
Epoch 10/30
139/139 [==============================] - 12s 86ms/step - loss: 0.0162 - mae: 0.0814 - val_loss: 0.0165 - val_mae: 0.0811
Epoch 11/30
139/139 [==============================] - 14s 103ms/step - loss: 0.0161 - mae: 0.0811 - val_loss: 0.0166 - val_mae: 0.0811
Epoch 12/30
139/139 [==============================] - 14s 101ms/step - loss: 0.0161 - mae: 0.0810 - val_loss: 0.0166 - val_mae: 0.0815
Epoch 13/30
139/139 [==============================] - 14s 98ms/step - loss: 0.0160 - mae: 0.0808 - val_loss: 0.0166 - val_mae: 0.0816
Epoch 14/30
139/139 [==============================] - 14s 101ms/step - loss: 0.0160 - mae: 0.0807 - val_loss: 0.0166 - val_mae: 0.0812
Epoch 15/30
139/139 [==============================] - 15s 105ms/step - loss: 0.0159 - mae: 0.0806 - val_loss: 0.0166 - val_mae: 0.0814
Epoch 16/30
139/139 [==============================] - 16s 113ms/step - loss: 0.0159 - mae: 0.0805 - val_loss: 0.0166 - val_mae: 0.0813
Epoch 17/30
139/139 [==============================] - 15s 105ms/step - loss: 0.0159 - mae: 0.0805 - val_loss: 0.0166 - val_mae: 0.0815
Epoch 18/30
139/139 [==============================] - 15s 108ms/step - loss: 0.0158 - mae: 0.0805 - val_loss: 0.0166 - val_mae: 0.0813
Epoch 19/30
139/139 [==============================] - 15s 106ms/step - loss: 0.0158 - mae: 0.0804 - val_loss: 0.0166 - val_mae: 0.0814
Epoch 20/30
139/139 [==============================] - 15s 106ms/step - loss: 0.0158 - mae: 0.0804 - val_loss: 0.0166 - val_mae: 0.0814
Epoch 21/30
139/139 [==============================] - 16s 113ms/step - loss: 0.0158 - mae: 0.0803 - val_loss: 0.0166 - val_mae: 0.0814
Epoch 22/30
139/139 [==============================] - 15s 111ms/step - loss: 0.0158 - mae: 0.0803 - val_loss: 0.0166 - val_mae: 0.0814
Epoch 23/30
139/139 [==============================] - 15s 111ms/step - loss: 0.0158 - mae: 0.0803 - val_loss: 0.0166 - val_mae: 0.0814
Epoch 24/30
139/139 [==============================] - 15s 111ms/step - loss: 0.0158 - mae: 0.0803 - val_loss: 0.0166 - val_mae: 0.0814
Epoch 25/30
139/139 [==============================] - 15s 109ms/step - loss: 0.0158 - mae: 0.0803 - val_loss: 0.0166 - val_mae: 0.0813
Epoch 26/30
139/139 [==============================] - 15s 110ms/step - loss: 0.0158 - mae: 0.0803 - val_loss: 0.0166 - val_mae: 0.0814
Epoch 27/30
139/139 [==============================] - 15s 105ms/step - loss: 0.0158 - mae: 0.0803 - val_loss: 0.0166 - val_mae: 0.0814
Epoch 28/30
139/139 [==============================] - 15s 108ms/step - loss: 0.0158 - mae: 0.0803 - val_loss: 0.0166 - val_mae: 0.0814
Epoch 29/30
139/139 [==============================] - 15s 109ms/step - loss: 0.0158 - mae: 0.0803 - val_loss: 0.0166 - val_mae: 0.0814
Epoch 30/30
139/139 [==============================] - 15s 108ms/step - loss: 0.0158 - mae: 0.0802 - val_loss: 0.0166 - val_mae: 0.0814
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C3.history['loss'], label='train')
plt.plot(history_C3.history['val_loss'], label='val')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C3 = model.predict(trainX, verbose = 1)
print(testingtrain_C3[0], testingtrain_C3.shape)

testingtest_C3 = model.predict(testX, verbose = 1)
print(testingtest_C3[0], testingtest_C3.shape)
308/308 [==============================] - 7s 23ms/step
[[0.4069072  0.16715577 0.16017939 ... 0.10997048 0.20091784 0.1463755 ]
 [0.35330594 0.16684483 0.16515994 ... 0.10788316 0.2366654  0.14150701]
 [0.24873096 0.1889876  0.14164582 ... 0.09074336 0.21716085 0.12632656]
 ...
 [0.6141548  0.36965352 0.14080018 ... 0.15465575 0.18609044 0.22139731]
 [0.30988735 0.29965663 0.12032406 ... 0.09694483 0.17298591 0.2218472 ]
 [0.24498408 0.2306185  0.11238103 ... 0.07804973 0.18270162 0.18827921]] (9840, 48, 121)
74/74 [==============================] - 2s 22ms/step
[[0.28273404 0.0981463  0.11865771 ... 0.04956917 0.17004544 0.08622462]
 [0.27971077 0.11394286 0.11323122 ... 0.04309981 0.19194354 0.07583451]
 [0.33155745 0.10338895 0.10427654 ... 0.03418354 0.20439732 0.0805179 ]
 ...
 [0.32455456 0.13051158 0.16563718 ... 0.1207829  0.21879396 0.1844056 ]
 [0.5063078  0.13148178 0.1719914  ... 0.10525349 0.23758015 0.16705379]
 [0.33658063 0.13474044 0.15082167 ... 0.07755177 0.23746417 0.14746428]] (2352, 48, 121)
In [ ]:
trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain_C3[:,1,:]))
print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C3[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))
Train Score: 0.14 RMSE
Test Score: 0.17 RMSE
In [ ]:
trainMAE = np.mean(mae(trainY[:,1,:], testingtrain_C3[:,1,:]))
print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C3[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Train Score: 0.09 MAE
Test Score: 0.11 MAE
In [ ]:
plt.imshow(trainY[:47,47,:])
plt.show()
plt.imshow(testingtrain_C3[:47,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,:1], marker='.', label="actual")
plt.plot(aa, testingtest_C3[:,1,:1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
##############################   WEEKEND   ##############################
In [ ]:
%%time
km_4_wkn = KMeans(4).fit(X_wkn)
pd.Series.value_counts(km_4_wkn.labels_)
CPU times: user 1.49 s, sys: 44.1 ms, total: 1.53 s
Wall time: 497 ms
Out[ ]:
0    117
2     98
1     48
3     36
dtype: int64
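As a sanity check on k = 4 for the weekend profiles, the silhouette score (imported earlier; range −1 to 1, higher is better) can be computed on the same feature matrix. A sketch, run before the 'cluster' column is appended below:

In [ ]:
# Sketch: silhouette score for the k=4 weekend clustering.
print(silhouette_score(X_wkn, km_4_wkn.labels_))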
In [ ]:
cls_list = km_4_wkn.labels_
X_wkn['cluster'] = cls_list
In [ ]:
X_wkn_C1 = X_wkn[X_wkn.cluster == 0]
X_wkn_C2 = X_wkn[X_wkn.cluster == 1]
X_wkn_C3 = X_wkn[X_wkn.cluster == 2]
X_wkn_C4 = X_wkn[X_wkn.cluster == 3]
In [ ]:
X_wkn_C1 = X_wkn_C1.drop(columns = 'cluster')
X_wkn_C1 = X_wkn_C1.transpose()
X_wkn_C1.head()
Out[ ]:
1 3 4 5 7 9 10 14 15 18 ... 282 283 284 285 291 292 295 296 297 298
Datetime
2012-07-01 00:00:00 0.855 0.965 0.084 0.108 0.087 0.025 0.020 0.191 0.066 0.184 ... 0.313 0.055 0.075 0.048 0.221 0.075 0.075 0.030 0.141 0.157
2012-07-01 00:30:00 0.786 0.927 0.084 0.098 0.098 0.022 0.036 0.176 0.067 0.128 ... 0.106 0.042 0.081 0.042 0.231 0.075 0.084 0.053 0.257 0.127
2012-07-01 01:00:00 0.604 1.359 0.082 0.105 0.064 0.011 0.009 0.199 0.052 0.096 ... 0.119 0.026 0.116 0.790 0.247 0.063 0.054 0.044 0.197 0.122
2012-07-01 01:30:00 0.544 0.060 0.084 0.075 0.089 0.023 0.045 0.164 0.057 0.098 ... 0.088 0.074 0.083 1.146 0.193 0.444 0.062 0.029 0.273 0.120
2012-07-01 02:00:00 0.597 0.059 0.086 0.102 0.067 0.024 0.099 0.190 0.066 0.097 ... 0.137 0.026 0.095 1.049 0.141 0.081 0.074 0.044 0.206 0.106

5 rows × 117 columns

In [ ]:
X_wkn_C1 = X_wkn_C1.values
# (optional, left disabled) scale values to [0, 1]:
#scaler = MinMaxScaler(feature_range=(0, 1))
#X_wkn_C1 = scaler.fit_transform(X_wkn_C1)
# Cap outliers at the 97th percentile
cap = np.percentile(X_wkn_C1, 97)
X_wkn_C1[X_wkn_C1 > cap] = cap
In [ ]:
training_size=int(X_wkn_C1.shape[0]*0.80)

test_size=(X_wkn_C1.shape[0])-training_size

train,test=X_wkn_C1[0:training_size],X_wkn_C1[training_size:(X_wkn_C1.shape[0])]
In [ ]:
# Same windowing helper as above, redefined for this section.
def get_batches(data, input_interval, target_interval, output_step_offset):
  batched_data_x = []
  batched_data_y = []
  for i in range(input_interval, (len(data) - target_interval - output_step_offset)):
    batched_data_x.append(data[i-input_interval:i])
    batched_data_y.append(data[i+output_step_offset:i+target_interval+output_step_offset])
  return np.array(batched_data_x), np.array(batched_data_y)
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)
In [ ]:
print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)
(3888, 48, 117) (3888, 48, 117)
(864, 48, 117) (864, 48, 117)
In [ ]:
# Build a sequential dense network
Model_1 = models.Sequential()
Model_1.add(layers.Dense(400, activation='relu', input_shape=(trainX.shape[1], trainX.shape[2])))
Model_1.add(Dropout(0.2))
Model_1.add(BatchNormalization())

Model_1.add(layers.Dense(200, activation='relu'))  # input_shape dropped: only the first layer needs it
Model_1.add(Dropout(0.2))
Model_1.add(BatchNormalization())

Model_1.add(Dense(trainX.shape[2]))
Model_1.compile(optimizer=optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_1.summary()
Model: "sequential_16"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_32 (Dense)             (None, 48, 400)           47200     
_________________________________________________________________
dropout_19 (Dropout)         (None, 48, 400)           0         
_________________________________________________________________
batch_normalization_16 (Batc (None, 48, 400)           1600      
_________________________________________________________________
dense_33 (Dense)             (None, 48, 200)           80200     
_________________________________________________________________
dropout_20 (Dropout)         (None, 48, 200)           0         
_________________________________________________________________
batch_normalization_17 (Batc (None, 48, 200)           800       
_________________________________________________________________
dense_34 (Dense)             (None, 48, 117)           23517     
=================================================================
Total params: 153,317
Trainable params: 152,117
Non-trainable params: 1,200
_________________________________________________________________
In [ ]:
model_train = Model_1.fit(trainX,trainY, epochs=30, validation_split = 0.10, batch_size = 64)
Epoch 1/30
55/55 [==============================] - 3s 57ms/step - loss: 0.4569 - mae: 0.4707 - val_loss: 0.0298 - val_mae: 0.1065
Epoch 2/30
55/55 [==============================] - 3s 58ms/step - loss: 0.1334 - mae: 0.2254 - val_loss: 0.0230 - val_mae: 0.1025
Epoch 3/30
55/55 [==============================] - 3s 60ms/step - loss: 0.0781 - mae: 0.1688 - val_loss: 0.0226 - val_mae: 0.1003
Epoch 4/30
55/55 [==============================] - 3s 57ms/step - loss: 0.0513 - mae: 0.1401 - val_loss: 0.0221 - val_mae: 0.0981
Epoch 5/30
55/55 [==============================] - 3s 55ms/step - loss: 0.0375 - mae: 0.1231 - val_loss: 0.0217 - val_mae: 0.0981
Epoch 6/30
55/55 [==============================] - 3s 55ms/step - loss: 0.0304 - mae: 0.1127 - val_loss: 0.0212 - val_mae: 0.0951
Epoch 7/30
55/55 [==============================] - 3s 57ms/step - loss: 0.0268 - mae: 0.1062 - val_loss: 0.0209 - val_mae: 0.0937
Epoch 8/30
55/55 [==============================] - 3s 56ms/step - loss: 0.0249 - mae: 0.1023 - val_loss: 0.0206 - val_mae: 0.0924
Epoch 9/30
55/55 [==============================] - 3s 59ms/step - loss: 0.0238 - mae: 0.1001 - val_loss: 0.0204 - val_mae: 0.0908
Epoch 10/30
55/55 [==============================] - 4s 69ms/step - loss: 0.0232 - mae: 0.0987 - val_loss: 0.0203 - val_mae: 0.0899
Epoch 11/30
55/55 [==============================] - 4s 64ms/step - loss: 0.0227 - mae: 0.0977 - val_loss: 0.0203 - val_mae: 0.0897
Epoch 12/30
55/55 [==============================] - 3s 59ms/step - loss: 0.0223 - mae: 0.0970 - val_loss: 0.0203 - val_mae: 0.0891
Epoch 13/30
55/55 [==============================] - 4s 68ms/step - loss: 0.0219 - mae: 0.0962 - val_loss: 0.0204 - val_mae: 0.0885
Epoch 14/30
55/55 [==============================] - 4s 67ms/step - loss: 0.0216 - mae: 0.0957 - val_loss: 0.0203 - val_mae: 0.0879
Epoch 15/30
55/55 [==============================] - 4s 69ms/step - loss: 0.0214 - mae: 0.0952 - val_loss: 0.0203 - val_mae: 0.0881
Epoch 16/30
55/55 [==============================] - 4s 70ms/step - loss: 0.0210 - mae: 0.0945 - val_loss: 0.0203 - val_mae: 0.0891
Epoch 17/30
55/55 [==============================] - 4s 74ms/step - loss: 0.0207 - mae: 0.0939 - val_loss: 0.0204 - val_mae: 0.0875
Epoch 18/30
55/55 [==============================] - 4s 67ms/step - loss: 0.0205 - mae: 0.0934 - val_loss: 0.0204 - val_mae: 0.0884
Epoch 19/30
55/55 [==============================] - 4s 65ms/step - loss: 0.0202 - mae: 0.0928 - val_loss: 0.0203 - val_mae: 0.0879
Epoch 20/30
55/55 [==============================] - 4s 76ms/step - loss: 0.0199 - mae: 0.0921 - val_loss: 0.0204 - val_mae: 0.0890
Epoch 21/30
55/55 [==============================] - 4s 66ms/step - loss: 0.0196 - mae: 0.0916 - val_loss: 0.0204 - val_mae: 0.0887
Epoch 22/30
55/55 [==============================] - 4s 73ms/step - loss: 0.0194 - mae: 0.0912 - val_loss: 0.0204 - val_mae: 0.0885
Epoch 23/30
55/55 [==============================] - 4s 69ms/step - loss: 0.0192 - mae: 0.0907 - val_loss: 0.0204 - val_mae: 0.0903
Epoch 24/30
55/55 [==============================] - 4s 70ms/step - loss: 0.0189 - mae: 0.0900 - val_loss: 0.0204 - val_mae: 0.0887
Epoch 25/30
55/55 [==============================] - 4s 68ms/step - loss: 0.0187 - mae: 0.0898 - val_loss: 0.0205 - val_mae: 0.0890
Epoch 26/30
55/55 [==============================] - 4s 67ms/step - loss: 0.0185 - mae: 0.0893 - val_loss: 0.0205 - val_mae: 0.0894
Epoch 27/30
55/55 [==============================] - 4s 70ms/step - loss: 0.0183 - mae: 0.0889 - val_loss: 0.0205 - val_mae: 0.0884
Epoch 28/30
55/55 [==============================] - 4s 73ms/step - loss: 0.0181 - mae: 0.0884 - val_loss: 0.0206 - val_mae: 0.0903
Epoch 29/30
55/55 [==============================] - 4s 74ms/step - loss: 0.0180 - mae: 0.0882 - val_loss: 0.0205 - val_mae: 0.0885
Epoch 30/30
55/55 [==============================] - 4s 76ms/step - loss: 0.0177 - mae: 0.0876 - val_loss: 0.0204 - val_mae: 0.0894
In [ ]:
Seq_train = Model_1.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_1.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
122/122 [==============================] - 1s 7ms/step
[[0.2986089  0.09945919 0.12806334 ... 0.08120063 0.21536234 0.13795952]
 [0.33998987 0.06907374 0.10949329 ... 0.0620617  0.21149638 0.11047769]
 [0.31623524 0.05554292 0.11668798 ... 0.05218457 0.20863134 0.10925641]
 ...
 [0.4951867  0.1278857  0.06601815 ... 0.07493007 0.28257927 0.176505  ]
 [0.27068126 0.4298092  0.12962072 ... 0.12585661 0.25682288 0.21092206]
 [0.17090839 0.3996947  0.15969864 ... 0.18836817 0.22922207 0.20629703]] (3888, 48, 117)
27/27 [==============================] - 0s 8ms/step
[[0.17767513 0.06041571 0.13110934 ... 0.06011301 0.2346696  0.15407142]
 [0.14202216 0.06062292 0.13040552 ... 0.08885814 0.21940514 0.14742158]
 [0.12239103 0.05679756 0.12929092 ... 0.07954407 0.20606694 0.13965592]
 ...
 [0.4117313  0.10123013 0.16380219 ... 0.06645621 0.23275429 0.20184666]
 [0.6548185  0.11148451 0.16410822 ... 0.07194567 0.23106444 0.19910222]
 [0.51659775 0.09875451 0.14972465 ... 0.08253226 0.22345518 0.16330162]] (864, 48, 117)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.19 RMSE
Test Score: 0.12 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
lr_decay = callbacks.LearningRateScheduler(schedule=lambda epoch: 0.001 * (0.80 ** epoch))
In [ ]:
model = Sequential()
model.add(LSTM(200, activation='relu', input_shape=(trainX.shape[1], trainX.shape[2]), return_sequences=True))
# Earlier experiments (Dropout, a second LSTM layer, an extra Dense) were dropped

model.add(Dense(trainX.shape[2]))

model.compile(optimizer='adam', metrics='mae', loss='mse')
model.summary()
Model: "sequential_19"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_10 (LSTM)               (None, 48, 200)           254400    
_________________________________________________________________
dense_37 (Dense)             (None, 48, 117)           23517     
=================================================================
Total params: 277,917
Trainable params: 277,917
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C1 = model.fit(trainX, trainY, epochs=30,
                       validation_split=0.10, batch_size=64, callbacks=[lr_decay])
Epoch 1/30
55/55 [==============================] - 6s 114ms/step - loss: 0.0313 - mae: 0.1177 - val_loss: 0.0221 - val_mae: 0.0969
Epoch 2/30
55/55 [==============================] - 6s 105ms/step - loss: 0.0240 - mae: 0.1019 - val_loss: 0.0211 - val_mae: 0.0938
Epoch 3/30
55/55 [==============================] - 6s 110ms/step - loss: 0.0224 - mae: 0.0978 - val_loss: 0.0208 - val_mae: 0.0921
Epoch 4/30
55/55 [==============================] - 6s 111ms/step - loss: 0.0214 - mae: 0.0955 - val_loss: 0.0207 - val_mae: 0.0922
Epoch 5/30
55/55 [==============================] - 5s 99ms/step - loss: 0.0208 - mae: 0.0940 - val_loss: 0.0207 - val_mae: 0.0917
Epoch 6/30
55/55 [==============================] - 5s 100ms/step - loss: 0.0203 - mae: 0.0929 - val_loss: 0.0207 - val_mae: 0.0914
Epoch 7/30
55/55 [==============================] - 6s 100ms/step - loss: 0.0199 - mae: 0.0922 - val_loss: 0.0206 - val_mae: 0.0914
Epoch 8/30
55/55 [==============================] - 7s 125ms/step - loss: 0.0196 - mae: 0.0915 - val_loss: 0.0206 - val_mae: 0.0916
Epoch 9/30
55/55 [==============================] - 6s 102ms/step - loss: 0.0194 - mae: 0.0910 - val_loss: 0.0206 - val_mae: 0.0917
Epoch 10/30
55/55 [==============================] - 6s 101ms/step - loss: 0.0192 - mae: 0.0906 - val_loss: 0.0207 - val_mae: 0.0916
Epoch 11/30
55/55 [==============================] - 6s 102ms/step - loss: 0.0191 - mae: 0.0903 - val_loss: 0.0207 - val_mae: 0.0914
Epoch 12/30
55/55 [==============================] - 6s 103ms/step - loss: 0.0190 - mae: 0.0900 - val_loss: 0.0207 - val_mae: 0.0915
Epoch 13/30
55/55 [==============================] - 6s 106ms/step - loss: 0.0189 - mae: 0.0898 - val_loss: 0.0207 - val_mae: 0.0915
Epoch 14/30
55/55 [==============================] - 6s 103ms/step - loss: 0.0188 - mae: 0.0896 - val_loss: 0.0207 - val_mae: 0.0915
Epoch 15/30
55/55 [==============================] - 6s 104ms/step - loss: 0.0188 - mae: 0.0895 - val_loss: 0.0207 - val_mae: 0.0914
Epoch 16/30
55/55 [==============================] - 6s 104ms/step - loss: 0.0187 - mae: 0.0894 - val_loss: 0.0207 - val_mae: 0.0914
Epoch 17/30
55/55 [==============================] - 6s 104ms/step - loss: 0.0187 - mae: 0.0893 - val_loss: 0.0207 - val_mae: 0.0916
Epoch 18/30
55/55 [==============================] - 6s 111ms/step - loss: 0.0187 - mae: 0.0892 - val_loss: 0.0207 - val_mae: 0.0915
Epoch 19/30
55/55 [==============================] - 5s 99ms/step - loss: 0.0186 - mae: 0.0892 - val_loss: 0.0207 - val_mae: 0.0915
Epoch 20/30
55/55 [==============================] - 6s 102ms/step - loss: 0.0186 - mae: 0.0891 - val_loss: 0.0207 - val_mae: 0.0915
Epoch 21/30
55/55 [==============================] - 5s 98ms/step - loss: 0.0186 - mae: 0.0891 - val_loss: 0.0207 - val_mae: 0.0915
Epoch 22/30
55/55 [==============================] - 5s 99ms/step - loss: 0.0186 - mae: 0.0891 - val_loss: 0.0207 - val_mae: 0.0916
Epoch 23/30
55/55 [==============================] - 6s 101ms/step - loss: 0.0186 - mae: 0.0891 - val_loss: 0.0207 - val_mae: 0.0915
Epoch 24/30
55/55 [==============================] - 6s 102ms/step - loss: 0.0186 - mae: 0.0890 - val_loss: 0.0207 - val_mae: 0.0915
Epoch 25/30
55/55 [==============================] - 6s 113ms/step - loss: 0.0186 - mae: 0.0890 - val_loss: 0.0208 - val_mae: 0.0916
Epoch 26/30
55/55 [==============================] - 6s 106ms/step - loss: 0.0185 - mae: 0.0890 - val_loss: 0.0207 - val_mae: 0.0916
Epoch 27/30
55/55 [==============================] - 6s 105ms/step - loss: 0.0185 - mae: 0.0890 - val_loss: 0.0208 - val_mae: 0.0915
Epoch 28/30
55/55 [==============================] - 6s 107ms/step - loss: 0.0185 - mae: 0.0890 - val_loss: 0.0208 - val_mae: 0.0915
Epoch 29/30
55/55 [==============================] - 6s 105ms/step - loss: 0.0185 - mae: 0.0890 - val_loss: 0.0208 - val_mae: 0.0915
Epoch 30/30
55/55 [==============================] - 5s 97ms/step - loss: 0.0185 - mae: 0.0890 - val_loss: 0.0208 - val_mae: 0.0915
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C1.history['loss'], label='train')
plt.plot(history_C1.history['val_loss'], label='val')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C1 = model.predict(trainX, verbose = 1)
print(testingtrain_C1[0], testingtrain_C1.shape)

testingtest_C1 = model.predict(testX, verbose = 1)
print(testingtest_C1[0], testingtest_C1.shape)
122/122 [==============================] - 2s 18ms/step
[[0.28979516 0.0353032  0.10709288 ... 0.07504996 0.17925994 0.10235494]
 [0.36420152 0.03706158 0.11356971 ... 0.04483584 0.22175226 0.08994807]
 [0.38343072 0.02927125 0.11397503 ... 0.05096188 0.23121367 0.09860832]
 ...
 [0.40706167 0.16052735 0.0970141  ... 0.12295728 0.2280949  0.23912616]
 [0.25983432 0.1560265  0.10001148 ... 0.09924986 0.23299052 0.18971634]
 [0.19158746 0.14288907 0.07815987 ... 0.10048438 0.22247589 0.16614857]] (3888, 48, 117)
27/27 [==============================] - 1s 21ms/step
[[0.24051249 0.04119528 0.11163003 ... 0.07979775 0.18156809 0.12322158]
 [0.2592879  0.03967054 0.12231255 ... 0.06346746 0.21103358 0.12943564]
 [0.29499826 0.03736248 0.13433881 ... 0.06091271 0.22791433 0.12454586]
 ...
 [0.39909256 0.10764904 0.1486254  ... 0.1061354  0.23696227 0.21288998]
 [0.42418462 0.09731186 0.11324845 ... 0.09021296 0.2405874  0.17463627]
 [0.32438928 0.1078632  0.09428947 ... 0.08847928 0.22654721 0.14721708]] (864, 48, 117)
In [ ]:
trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain_C1[:,1,:]))
print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C1[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))


trainMAE = np.mean(mae(trainY[:,1,:], testingtrain_C1[:,1,:]))
print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C1[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Train Score: 0.15 RMSE
Test Score: 0.18 RMSE
Train Score: 0.10 MAE
Test Score: 0.12 MAE
In [ ]:
# Same metric helpers as above, redefined for this section.
def rmse(actual, pred):
    return np.sqrt(((pred - actual) ** 2).mean())
def mae(actual, pred):
    return np.mean(np.abs(actual - pred))
In [ ]:
plt.imshow(trainY[:47,47,:])
plt.show()
plt.imshow(testingtrain_C1[:47,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,:1], marker='.', label="actual")
plt.plot(aa, testingtest_C1[:,1,:1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
X_wkn_C2 = X_wkn_C2.drop(columns = 'cluster')
X_wkn_C2 = X_wkn_C2.transpose()
X_wkn_C2.head()
Out[ ]:
20 21 25 26 33 35 46 55 56 57 ... 242 246 248 251 253 254 267 274 289 294
Datetime
2012-07-01 00:00:00 0.050 0.187 0.038 0.863 0.668 0.277 0.229 0.986 0.139 0.069 ... 1.207 0.526 0.218 1.160 1.029 0.094 0.761 0.251 0.726 0.134
2012-07-01 00:30:00 0.063 0.169 0.088 0.813 0.661 0.276 0.168 0.773 0.125 0.050 ... 1.304 0.572 0.232 1.143 1.085 0.135 1.390 0.159 0.733 0.174
2012-07-01 01:00:00 0.038 0.186 0.031 0.863 0.543 0.279 0.176 0.882 0.122 0.063 ... 1.319 0.574 0.202 2.913 1.023 0.092 1.450 0.298 0.750 0.165
2012-07-01 01:30:00 0.063 0.176 0.100 0.838 0.276 0.305 0.286 0.667 0.133 0.063 ... 1.203 0.633 0.231 2.924 0.937 0.100 1.841 0.267 0.211 0.104
2012-07-01 02:00:00 0.063 0.172 0.025 0.838 0.285 0.285 0.214 0.440 0.124 0.063 ... 1.283 0.584 0.221 2.942 0.676 0.125 1.074 0.199 0.159 0.130

5 rows × 48 columns

In [ ]:
X_wkn_C2 = X_wkn_C2.values
# (optional, left disabled) scale values to [0, 1]:
#scaler = MinMaxScaler(feature_range=(0, 1))
#X_wkn_C2 = scaler.fit_transform(X_wkn_C2)
# Cap outliers at the 97th percentile
cap = np.percentile(X_wkn_C2, 97)
X_wkn_C2[X_wkn_C2 > cap] = cap
In [ ]:
training_size=int(X_wkn_C2.shape[0]*0.80)

test_size=(X_wkn_C2.shape[0])-training_size

train,test=X_wkn_C2[0:training_size],X_wkn_C2[training_size:(X_wkn_C2.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)
In [ ]:
print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)
(3888, 48, 48) (3888, 48, 48)
(864, 48, 48) (864, 48, 48)
In [ ]:
# Build a sequential dense network
Model_2 = models.Sequential()
Model_2.add(layers.Dense(400, activation='relu', input_shape=(trainX.shape[1], trainX.shape[2])))
Model_2.add(Dropout(0.5))
Model_2.add(BatchNormalization())

Model_2.add(layers.Dense(200, activation='relu'))
Model_2.add(Dropout(0.5))
Model_2.add(BatchNormalization())

Model_2.add(Dense(trainX.shape[2]))

Model_2.compile(optimizer=optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_2.summary()
Model: "sequential_20"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_38 (Dense)             (None, 48, 400)           19600     
_________________________________________________________________
dropout_23 (Dropout)         (None, 48, 400)           0         
_________________________________________________________________
batch_normalization_18 (Batc (None, 48, 400)           1600      
_________________________________________________________________
dense_39 (Dense)             (None, 48, 200)           80200     
_________________________________________________________________
dropout_24 (Dropout)         (None, 48, 200)           0         
_________________________________________________________________
batch_normalization_19 (Batc (None, 48, 200)           800       
_________________________________________________________________
dense_40 (Dense)             (None, 48, 48)            9648      
=================================================================
Total params: 111,848
Trainable params: 110,648
Non-trainable params: 1,200
_________________________________________________________________
In [ ]:
model_train = Model_2.fit(trainX,trainY, epochs=30, batch_size = 32, validation_split = 0.10)
Epoch 1/30
110/110 [==============================] - 4s 35ms/step - loss: 0.7441 - mae: 0.6246 - val_loss: 0.1100 - val_mae: 0.2380
Epoch 2/30
110/110 [==============================] - 4s 32ms/step - loss: 0.1864 - mae: 0.3041 - val_loss: 0.1048 - val_mae: 0.2323
Epoch 3/30
110/110 [==============================] - 4s 33ms/step - loss: 0.1283 - mae: 0.2513 - val_loss: 0.1009 - val_mae: 0.2239
Epoch 4/30
110/110 [==============================] - 4s 35ms/step - loss: 0.1165 - mae: 0.2395 - val_loss: 0.0982 - val_mae: 0.2145
Epoch 5/30
110/110 [==============================] - 4s 33ms/step - loss: 0.1114 - mae: 0.2339 - val_loss: 0.0979 - val_mae: 0.2146
Epoch 6/30
110/110 [==============================] - 3s 32ms/step - loss: 0.1071 - mae: 0.2292 - val_loss: 0.0953 - val_mae: 0.2066
Epoch 7/30
110/110 [==============================] - 4s 34ms/step - loss: 0.1038 - mae: 0.2256 - val_loss: 0.0968 - val_mae: 0.2100
Epoch 8/30
110/110 [==============================] - 4s 34ms/step - loss: 0.1010 - mae: 0.2224 - val_loss: 0.0975 - val_mae: 0.2098
Epoch 9/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0990 - mae: 0.2200 - val_loss: 0.0963 - val_mae: 0.2068
Epoch 10/30
110/110 [==============================] - 3s 32ms/step - loss: 0.0972 - mae: 0.2178 - val_loss: 0.0984 - val_mae: 0.2120
Epoch 11/30
110/110 [==============================] - 3s 32ms/step - loss: 0.0954 - mae: 0.2158 - val_loss: 0.0965 - val_mae: 0.2100
Epoch 12/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0940 - mae: 0.2141 - val_loss: 0.0963 - val_mae: 0.2096
Epoch 13/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0924 - mae: 0.2125 - val_loss: 0.0984 - val_mae: 0.2129
Epoch 14/30
110/110 [==============================] - 4s 32ms/step - loss: 0.0910 - mae: 0.2108 - val_loss: 0.0995 - val_mae: 0.2148
Epoch 15/30
110/110 [==============================] - 3s 31ms/step - loss: 0.0899 - mae: 0.2095 - val_loss: 0.0960 - val_mae: 0.2074
Epoch 16/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0889 - mae: 0.2085 - val_loss: 0.0975 - val_mae: 0.2102
Epoch 17/30
110/110 [==============================] - 4s 32ms/step - loss: 0.0877 - mae: 0.2068 - val_loss: 0.0978 - val_mae: 0.2118
Epoch 18/30
110/110 [==============================] - 4s 32ms/step - loss: 0.0868 - mae: 0.2059 - val_loss: 0.1003 - val_mae: 0.2186
Epoch 19/30
110/110 [==============================] - 3s 32ms/step - loss: 0.0857 - mae: 0.2048 - val_loss: 0.0968 - val_mae: 0.2097
Epoch 20/30
110/110 [==============================] - 4s 32ms/step - loss: 0.0849 - mae: 0.2038 - val_loss: 0.0949 - val_mae: 0.2062
Epoch 21/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0842 - mae: 0.2029 - val_loss: 0.0961 - val_mae: 0.2117
Epoch 22/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0835 - mae: 0.2021 - val_loss: 0.0975 - val_mae: 0.2140
Epoch 23/30
110/110 [==============================] - 4s 32ms/step - loss: 0.0828 - mae: 0.2014 - val_loss: 0.0944 - val_mae: 0.2070
Epoch 24/30
110/110 [==============================] - 4s 32ms/step - loss: 0.0819 - mae: 0.2003 - val_loss: 0.0958 - val_mae: 0.2099
Epoch 25/30
110/110 [==============================] - 4s 32ms/step - loss: 0.0814 - mae: 0.1998 - val_loss: 0.0961 - val_mae: 0.2090
Epoch 26/30
110/110 [==============================] - 3s 31ms/step - loss: 0.0805 - mae: 0.1987 - val_loss: 0.0965 - val_mae: 0.2124
Epoch 27/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0803 - mae: 0.1984 - val_loss: 0.0947 - val_mae: 0.2059
Epoch 28/30
110/110 [==============================] - 4s 32ms/step - loss: 0.0799 - mae: 0.1979 - val_loss: 0.0947 - val_mae: 0.2047
Epoch 29/30
110/110 [==============================] - 4s 37ms/step - loss: 0.0793 - mae: 0.1972 - val_loss: 0.0936 - val_mae: 0.2080
Epoch 30/30
110/110 [==============================] - 4s 34ms/step - loss: 0.0788 - mae: 0.1967 - val_loss: 0.0944 - val_mae: 0.2057
In [ ]:
Seq_train = Model_2.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_2.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
122/122 [==============================] - 1s 5ms/step
[[0.44966286 0.436657   0.13776588 ... 0.4001467  0.51135963 0.4025679 ]
 [0.39490908 0.46624622 0.11641091 ... 0.37655318 0.57519335 0.4069517 ]
 [0.5305103  0.29734978 0.12064654 ... 0.3539618  0.7000641  0.5485299 ]
 ...
 [0.3119385  0.46399856 0.2413839  ... 0.641664   0.20404364 0.30043986]
 [0.34400904 0.47572407 0.19036108 ... 0.5951812  0.4177737  0.35217753]
 [0.32207868 0.44679156 0.14641285 ... 0.53353417 0.63049734 0.33345464]] (3888, 48, 48)
27/27 [==============================] - 0s 6ms/step
[[0.3945129  0.5515882  0.22128738 ... 0.39356205 0.64579535 0.5000187 ]
 [0.33286834 0.54894966 0.16382106 ... 0.3611811  0.74688935 0.51358825]
 [0.33039042 0.42721385 0.1593431  ... 0.2947872  0.78120553 0.6829271 ]
 ...
 [0.2686646  0.5842144  0.16054118 ... 0.41018933 0.25797567 0.404441  ]
 [0.31786227 0.70593536 0.1351417  ... 0.45636743 0.45313513 0.42375275]
 [0.3603599  0.63850474 0.1416134  ... 0.41797975 0.61846715 0.47381747]] (864, 48, 48)
In [ ]:
# Train scores (disabled): the same computation on trainY / Seq_train
testScore = math.sqrt(mean_squared_error(testY[:48,1,1], Seq_test[:48,1,1]))
print('Test Score: %.2f RMSE' % (testScore))

# mae() is the error helper defined in a later cell; that cell must run before this one
testMAE = np.mean(mae(testY[:48,1,1], Seq_test[:48,1,1]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.40 RMSE
Test Score: 0.30 MAE
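
The scores above use a 48-step slice of a single meter (testY[:48,1,1]), so they describe one day of one series. A hypothetical broader check would average over every window, step and meter:

In [ ]:
# Hypothetical overall test scores across all windows, steps and meters
overall_rmse = math.sqrt(mean_squared_error(testY.reshape(-1), Seq_test.reshape(-1)))
overall_mae = np.mean(np.abs(testY - Seq_test))
print('Overall: %.2f RMSE, %.2f MAE' % (overall_rmse, overall_mae))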
In [ ]:
plt.imshow(testY[:48,47,:])      # actual
plt.show()
plt.imshow(Seq_test[:48,47,:])   # prediction
plt.show()
In [ ]:
aa = np.arange(testY.shape[0])
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()
model.add(LSTM(100, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))
model.add(Dropout(0.2))
model.add(Dense(trainX.shape[2]))
# Alternatives tried: optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9), optimizers.Adam(lr=0.01)
model.compile(optimizer='adam', loss='mse', metrics=['mae'])
model.summary()
Model: "sequential_22"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_12 (LSTM)               (None, 48, 100)           59600     
_________________________________________________________________
dropout_25 (Dropout)         (None, 48, 100)           0         
_________________________________________________________________
dense_42 (Dense)             (None, 48, 48)            4848      
=================================================================
Total params: 64,448
Trainable params: 64,448
Non-trainable params: 0
_________________________________________________________________
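
The next cell passes callbacks=[lr_decay] to fit(). lr_decay is the exponential learning-rate scheduler defined in a later cell of this notebook, so that cell must already have been run; its definition is repeated here for readability:

In [ ]:
# Copied from the later cell that defines it: decay the learning rate by 20% per epoch
lr_decay = callbacks.LearningRateScheduler(schedule=lambda epoch: 0.001 * (0.80 ** epoch))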
In [ ]:
# fit model
history_C2 = model.fit(trainX,trainY, epochs=30, batch_size = 32,
                       validation_split = 0.10, callbacks=[lr_decay])
Epoch 1/30
110/110 [==============================] - 4s 38ms/step - loss: 0.1590 - mae: 0.2818 - val_loss: 0.1108 - val_mae: 0.2367
Epoch 2/30
110/110 [==============================] - 4s 40ms/step - loss: 0.1199 - mae: 0.2460 - val_loss: 0.1119 - val_mae: 0.2398
Epoch 3/30
110/110 [==============================] - 4s 36ms/step - loss: 0.1101 - mae: 0.2346 - val_loss: 0.1092 - val_mae: 0.2367
Epoch 4/30
110/110 [==============================] - 4s 36ms/step - loss: 0.1044 - mae: 0.2277 - val_loss: 0.1068 - val_mae: 0.2311
Epoch 5/30
110/110 [==============================] - 4s 34ms/step - loss: 0.1008 - mae: 0.2233 - val_loss: 0.1037 - val_mae: 0.2263
Epoch 6/30
110/110 [==============================] - 4s 37ms/step - loss: 0.0983 - mae: 0.2203 - val_loss: 0.1044 - val_mae: 0.2282
Epoch 7/30
110/110 [==============================] - 4s 35ms/step - loss: 0.0965 - mae: 0.2182 - val_loss: 0.1034 - val_mae: 0.2268
Epoch 8/30
110/110 [==============================] - 4s 36ms/step - loss: 0.0952 - mae: 0.2165 - val_loss: 0.1027 - val_mae: 0.2252
Epoch 9/30
110/110 [==============================] - 4s 36ms/step - loss: 0.0942 - mae: 0.2152 - val_loss: 0.1028 - val_mae: 0.2254
Epoch 10/30
110/110 [==============================] - 4s 36ms/step - loss: 0.0934 - mae: 0.2142 - val_loss: 0.1038 - val_mae: 0.2274
Epoch 11/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0927 - mae: 0.2134 - val_loss: 0.1030 - val_mae: 0.2260
Epoch 12/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0923 - mae: 0.2128 - val_loss: 0.1035 - val_mae: 0.2268
Epoch 13/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0918 - mae: 0.2123 - val_loss: 0.1032 - val_mae: 0.2257
Epoch 14/30
110/110 [==============================] - 4s 34ms/step - loss: 0.0915 - mae: 0.2120 - val_loss: 0.1035 - val_mae: 0.2266
Epoch 15/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0912 - mae: 0.2116 - val_loss: 0.1033 - val_mae: 0.2262
Epoch 16/30
110/110 [==============================] - 4s 35ms/step - loss: 0.0910 - mae: 0.2114 - val_loss: 0.1036 - val_mae: 0.2268
Epoch 17/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0908 - mae: 0.2111 - val_loss: 0.1032 - val_mae: 0.2261
Epoch 18/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0906 - mae: 0.2109 - val_loss: 0.1032 - val_mae: 0.2259
Epoch 19/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0905 - mae: 0.2108 - val_loss: 0.1034 - val_mae: 0.2263
Epoch 20/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0905 - mae: 0.2107 - val_loss: 0.1032 - val_mae: 0.2260
Epoch 21/30
110/110 [==============================] - 4s 32ms/step - loss: 0.0904 - mae: 0.2105 - val_loss: 0.1034 - val_mae: 0.2263
Epoch 22/30
110/110 [==============================] - 3s 32ms/step - loss: 0.0903 - mae: 0.2104 - val_loss: 0.1035 - val_mae: 0.2265
Epoch 23/30
110/110 [==============================] - 4s 32ms/step - loss: 0.0903 - mae: 0.2105 - val_loss: 0.1031 - val_mae: 0.2257
Epoch 24/30
110/110 [==============================] - 4s 34ms/step - loss: 0.0902 - mae: 0.2103 - val_loss: 0.1033 - val_mae: 0.2261
Epoch 25/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0902 - mae: 0.2104 - val_loss: 0.1035 - val_mae: 0.2263
Epoch 26/30
110/110 [==============================] - 4s 32ms/step - loss: 0.0901 - mae: 0.2103 - val_loss: 0.1034 - val_mae: 0.2261
Epoch 27/30
110/110 [==============================] - 4s 32ms/step - loss: 0.0901 - mae: 0.2103 - val_loss: 0.1034 - val_mae: 0.2263
Epoch 28/30
110/110 [==============================] - 4s 32ms/step - loss: 0.0901 - mae: 0.2103 - val_loss: 0.1034 - val_mae: 0.2261
Epoch 29/30
110/110 [==============================] - 4s 33ms/step - loss: 0.0901 - mae: 0.2102 - val_loss: 0.1034 - val_mae: 0.2262
Epoch 30/30
110/110 [==============================] - 4s 32ms/step - loss: 0.0901 - mae: 0.2103 - val_loss: 0.1033 - val_mae: 0.2261
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C2.history['loss'], label='train')
plt.plot(history_C2.history['val_loss'], label='val')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C2 = model.predict(trainX, verbose = 1)
print(testingtrain_C2[0], testingtrain_C2.shape)

testingtest_C2 = model.predict(testX, verbose = 1)
print(testingtest_C2[0], testingtest_C2.shape)
122/122 [==============================] - 1s 11ms/step
[[0.40755028 0.43762478 0.21015893 ... 0.4233815  0.35452598 0.30635726]
 [0.40976486 0.42693633 0.18931814 ... 0.4380641  0.41991627 0.31919557]
 [0.43207613 0.35857743 0.15857272 ... 0.41828573 0.5520713  0.30257374]
 ...
 [0.359171   0.56884426 0.29520518 ... 0.70929253 0.34612703 0.2727503 ]
 [0.3289282  0.6042862  0.18320501 ... 0.61630243 0.49300158 0.23854837]
 [0.38107356 0.5847286  0.13477749 ... 0.5628966  0.6487729  0.25046402]] (3888, 48, 48)
27/27 [==============================] - 0s 17ms/step
[[0.38622454 0.450015   0.18412526 ... 0.38069558 0.40767634 0.3802338 ]
 [0.49388447 0.45820504 0.25213632 ... 0.40903002 0.51510143 0.45717072]
 [0.53924245 0.41065708 0.25573456 ... 0.3899691  0.6199733  0.43667454]
 ...
 [0.3299394  0.50545114 0.20892103 ... 0.47672188 0.26970917 0.49348035]
 [0.39949355 0.627305   0.2235834  ... 0.46076614 0.42432874 0.49477944]
 [0.4629786  0.6612797  0.2002185  ... 0.40139383 0.6039893  0.49740642]] (864, 48, 48)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C2[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))


testMAE = np.mean(mae(testY[:,1,:], testingtest_C2[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.36 RMSE
Test Score: 0.26 MAE
In [ ]:
plt.imshow(trainY[:47,47,:])            # actual
plt.show()
plt.imshow(testingtrain_C2[:47,47,:])   # prediction
plt.show()
In [ ]:
aa = np.arange(testY.shape[0])
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, testingtest_C2[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
X_wkn_C3 = X_wkn_C3.drop(columns = 'cluster')
X_wkn_C3 = X_wkn_C3.transpose()
X_wkn_C3.head()
Out[ ]:
11 12 13 24 27 29 31 32 34 38 ... 268 270 271 273 275 280 286 287 288 290
Datetime
2012-07-01 00:00:00 0.567 0.241 0.234 0.130 0.612 0.068 1.127 0.060 0.269 0.032 ... 0.082 0.252 1.050 0.055 0.131 0.791 0.147 0.788 0.520 0.091
2012-07-01 00:30:00 0.547 0.197 0.343 0.127 0.609 0.048 1.262 0.060 0.331 0.031 ... 0.107 0.216 0.996 0.053 0.106 0.787 0.130 0.738 0.177 0.110
2012-07-01 01:00:00 0.613 0.163 0.234 0.146 0.414 0.065 1.157 0.059 0.281 0.045 ... 0.090 0.185 1.013 0.092 0.119 1.057 0.203 0.675 0.439 0.065
2012-07-01 01:30:00 0.519 0.163 0.339 0.133 0.264 0.074 0.232 0.053 0.169 0.024 ... 0.077 0.169 0.324 0.055 0.119 0.833 0.120 0.175 0.041 0.064
2012-07-01 02:00:00 0.314 0.207 0.330 0.133 0.215 0.080 0.214 0.476 0.175 0.051 ... 0.024 0.157 0.170 0.050 0.119 0.838 0.165 0.188 0.039 0.198

5 rows × 98 columns

In [ ]:
X_wkn_C3 = X_wkn_C3.values

# Scaling to the [0, 1] range (disabled)
# scaler = MinMaxScaler(feature_range=(0, 1))
# X_wkn_C3 = scaler.fit_transform(X_wkn_C3)

# Cap outliers at the 97th percentile
cap = np.percentile(X_wkn_C3, 97)
X_wkn_C3[X_wkn_C3 > cap] = cap
In [ ]:
training_size = int(X_wkn_C3.shape[0] * 0.80)
test_size = X_wkn_C3.shape[0] - training_size

train, test = X_wkn_C3[:training_size], X_wkn_C3[training_size:]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)
In [ ]:
print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)
(3888, 48, 98) (3888, 48, 98)
(864, 48, 98) (864, 48, 98)
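
get_batches is the windowing helper defined earlier in the notebook. Its exact code is not shown here, but the reported shapes (3888 = 4032 − 3·48 training windows, 864 = 1008 − 3·48 test windows) are consistent with a sliding-window sketch of the following form, where the argument names are assumptions:

In [ ]:
# Hypothetical sketch of the windowing helper (not the notebook's own definition):
# each X is a 48-step input window, each Y the 48-step window one horizon ahead.
def get_batches_sketch(data, in_len, horizon, out_len):
    X, Y = [], []
    for t in range(len(data) - in_len - horizon - out_len):
        X.append(data[t : t + in_len])
        Y.append(data[t + horizon : t + horizon + out_len])
    return np.array(X), np.array(Y)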
In [ ]:
# Build a sequential (dense) network
Model_3 = models.Sequential()
Model_3.add(layers.Dense(400, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_3.add(Dropout(0.2))
Model_3.add(BatchNormalization())

Model_3.add(layers.Dense(200, activation='relu'))
Model_3.add(Dropout(0.2))
Model_3.add(BatchNormalization())

Model_3.add(Dense(trainX.shape[2]))
Model_3.compile(optimizer=optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_3.summary()
Model: "sequential_23"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_43 (Dense)             (None, 48, 400)           39600     
_________________________________________________________________
dropout_26 (Dropout)         (None, 48, 400)           0         
_________________________________________________________________
batch_normalization_20 (Batc (None, 48, 400)           1600      
_________________________________________________________________
dense_44 (Dense)             (None, 48, 200)           80200     
_________________________________________________________________
dropout_27 (Dropout)         (None, 48, 200)           0         
_________________________________________________________________
batch_normalization_21 (Batc (None, 48, 200)           800       
_________________________________________________________________
dense_45 (Dense)             (None, 48, 98)            19698     
=================================================================
Total params: 141,898
Trainable params: 140,698
Non-trainable params: 1,200
_________________________________________________________________
In [ ]:
model_train = Model_3.fit(trainX,trainY, epochs=30, batch_size = 64, validation_split = 0.10)
Epoch 1/30
55/55 [==============================] - 4s 66ms/step - loss: 0.5901 - mae: 0.5563 - val_loss: 0.0670 - val_mae: 0.1579
Epoch 2/30
55/55 [==============================] - 4s 66ms/step - loss: 0.1924 - mae: 0.2960 - val_loss: 0.0528 - val_mae: 0.1691
Epoch 3/30
55/55 [==============================] - 4s 69ms/step - loss: 0.1220 - mae: 0.2319 - val_loss: 0.0501 - val_mae: 0.1571
Epoch 4/30
55/55 [==============================] - 3s 63ms/step - loss: 0.0908 - mae: 0.2003 - val_loss: 0.0494 - val_mae: 0.1539
Epoch 5/30
55/55 [==============================] - 3s 63ms/step - loss: 0.0745 - mae: 0.1815 - val_loss: 0.0485 - val_mae: 0.1477
Epoch 6/30
55/55 [==============================] - 4s 67ms/step - loss: 0.0652 - mae: 0.1698 - val_loss: 0.0478 - val_mae: 0.1464
Epoch 7/30
55/55 [==============================] - 4s 68ms/step - loss: 0.0599 - mae: 0.1628 - val_loss: 0.0473 - val_mae: 0.1428
Epoch 8/30
55/55 [==============================] - 4s 72ms/step - loss: 0.0565 - mae: 0.1581 - val_loss: 0.0469 - val_mae: 0.1382
Epoch 9/30
55/55 [==============================] - 4s 68ms/step - loss: 0.0544 - mae: 0.1551 - val_loss: 0.0465 - val_mae: 0.1391
Epoch 10/30
55/55 [==============================] - 4s 70ms/step - loss: 0.0525 - mae: 0.1529 - val_loss: 0.0464 - val_mae: 0.1378
Epoch 11/30
55/55 [==============================] - 4s 65ms/step - loss: 0.0511 - mae: 0.1506 - val_loss: 0.0463 - val_mae: 0.1369
Epoch 12/30
55/55 [==============================] - 4s 71ms/step - loss: 0.0498 - mae: 0.1489 - val_loss: 0.0465 - val_mae: 0.1366
Epoch 13/30
55/55 [==============================] - 4s 68ms/step - loss: 0.0487 - mae: 0.1472 - val_loss: 0.0464 - val_mae: 0.1382
Epoch 14/30
55/55 [==============================] - 4s 66ms/step - loss: 0.0477 - mae: 0.1459 - val_loss: 0.0467 - val_mae: 0.1385
Epoch 15/30
55/55 [==============================] - 4s 71ms/step - loss: 0.0467 - mae: 0.1446 - val_loss: 0.0464 - val_mae: 0.1371
Epoch 16/30
55/55 [==============================] - 4s 71ms/step - loss: 0.0459 - mae: 0.1434 - val_loss: 0.0467 - val_mae: 0.1391
Epoch 17/30
55/55 [==============================] - 4s 67ms/step - loss: 0.0450 - mae: 0.1421 - val_loss: 0.0467 - val_mae: 0.1404
Epoch 18/30
55/55 [==============================] - 4s 67ms/step - loss: 0.0443 - mae: 0.1412 - val_loss: 0.0471 - val_mae: 0.1391
Epoch 19/30
55/55 [==============================] - 4s 68ms/step - loss: 0.0437 - mae: 0.1403 - val_loss: 0.0471 - val_mae: 0.1395
Epoch 20/30
55/55 [==============================] - 4s 70ms/step - loss: 0.0430 - mae: 0.1393 - val_loss: 0.0472 - val_mae: 0.1403
Epoch 21/30
55/55 [==============================] - 4s 66ms/step - loss: 0.0422 - mae: 0.1383 - val_loss: 0.0474 - val_mae: 0.1394
Epoch 22/30
55/55 [==============================] - 3s 62ms/step - loss: 0.0417 - mae: 0.1375 - val_loss: 0.0475 - val_mae: 0.1393
Epoch 23/30
55/55 [==============================] - 3s 57ms/step - loss: 0.0410 - mae: 0.1365 - val_loss: 0.0476 - val_mae: 0.1401
Epoch 24/30
55/55 [==============================] - 3s 55ms/step - loss: 0.0405 - mae: 0.1357 - val_loss: 0.0479 - val_mae: 0.1406
Epoch 25/30
55/55 [==============================] - 3s 62ms/step - loss: 0.0399 - mae: 0.1349 - val_loss: 0.0476 - val_mae: 0.1395
Epoch 26/30
55/55 [==============================] - 3s 58ms/step - loss: 0.0395 - mae: 0.1342 - val_loss: 0.0479 - val_mae: 0.1402
Epoch 27/30
55/55 [==============================] - 3s 60ms/step - loss: 0.0389 - mae: 0.1334 - val_loss: 0.0479 - val_mae: 0.1395
Epoch 28/30
55/55 [==============================] - 4s 66ms/step - loss: 0.0385 - mae: 0.1329 - val_loss: 0.0483 - val_mae: 0.1414
Epoch 29/30
55/55 [==============================] - 3s 61ms/step - loss: 0.0382 - mae: 0.1324 - val_loss: 0.0482 - val_mae: 0.1405
Epoch 30/30
55/55 [==============================] - 4s 65ms/step - loss: 0.0378 - mae: 0.1319 - val_loss: 0.0485 - val_mae: 0.1416
In [ ]:
Seq_train = Model_3.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_3.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
122/122 [==============================] - 1s 6ms/step
[[0.40854186 0.33082265 0.23989707 ... 0.5652106  0.21824658 0.21616814]
 [0.31849396 0.23839687 0.22073516 ... 0.37990445 0.12636392 0.14502251]
 [0.24759519 0.20779744 0.16439323 ... 0.3791691  0.11232588 0.15353814]
 ...
 [0.7226467  0.29453564 0.78950423 ... 1.0824848  0.45733932 0.10411686]
 [0.55685025 0.27172595 0.6727711  ... 1.0725505  0.44318745 0.0727935 ]
 [0.45645514 0.25021845 0.53950256 ... 0.8567641  0.3775665  0.11833621]] (3888, 48, 98)
27/27 [==============================] - 0s 9ms/step
[[ 0.28707212  0.32665583  0.1298516  ...  0.32933444  0.14628194
   0.22762698]
 [ 0.27937394  0.28739557  0.09127843 ...  0.34849626  0.12432596
   0.17525044]
 [ 0.28591484  0.27601418  0.12303261 ...  0.4107222   0.11790535
   0.19298834]
 ...
 [ 0.38557896  0.26794392  0.11683127 ...  0.22809967  0.15453887
   0.1446666 ]
 [ 0.33240312  0.24220352  0.16970274 ...  0.18361367  0.15071319
   0.02106844]
 [ 0.28583986  0.20735924  0.1595102  ...  0.27767196  0.10003679
  -0.01724552]] (864, 48, 98)
In [ ]:
# Train scores (disabled): the same computation on trainY / Seq_train
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.32 RMSE
Test Score: 0.21 MAE
In [ ]:
plt.imshow(testY[:48,47,:])      # actual
plt.show()
plt.imshow(Seq_test[:48,47,:])   # prediction
plt.show()
In [ ]:
aa = np.arange(testY.shape[0])
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,:1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,1,:1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()
model.add(LSTM(200, activation='relu', input_shape=(trainX.shape[1], trainX.shape[2]), return_sequences=True))

# Variants tried: Dropout and a second LSTM layer
# model.add(Dropout(0.01))
# model.add(LSTM(50, activation='relu', return_sequences=True))
# model.add(Dropout(0.01))

model.add(Dense(trainX.shape[2]))
# Alternative optimizer tried: optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9)
model.compile(optimizer=optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
model.summary()
Model: "sequential_26"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_15 (LSTM)               (None, 48, 200)           239200    
_________________________________________________________________
dense_48 (Dense)             (None, 48, 98)            19698     
=================================================================
Total params: 258,898
Trainable params: 258,898
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C3 = model.fit(trainX,trainY, epochs=30,
                       validation_split = 0.10, callbacks=[lr_decay],
                       batch_size = 64)
Epoch 1/30
55/55 [==============================] - 6s 110ms/step - loss: 0.0635 - mae: 0.1704 - val_loss: 0.0482 - val_mae: 0.1444
Epoch 2/30
55/55 [==============================] - 6s 104ms/step - loss: 0.0527 - mae: 0.1529 - val_loss: 0.0489 - val_mae: 0.1478
Epoch 3/30
55/55 [==============================] - 6s 105ms/step - loss: 0.0498 - mae: 0.1487 - val_loss: 0.0492 - val_mae: 0.1476
Epoch 4/30
55/55 [==============================] - 6s 104ms/step - loss: 0.0481 - mae: 0.1461 - val_loss: 0.0498 - val_mae: 0.1485
Epoch 5/30
55/55 [==============================] - 6s 103ms/step - loss: 0.0470 - mae: 0.1444 - val_loss: 0.0497 - val_mae: 0.1478
Epoch 6/30
55/55 [==============================] - 6s 102ms/step - loss: 0.0462 - mae: 0.1432 - val_loss: 0.0504 - val_mae: 0.1499
Epoch 7/30
55/55 [==============================] - 6s 105ms/step - loss: 0.0456 - mae: 0.1424 - val_loss: 0.0503 - val_mae: 0.1489
Epoch 8/30
55/55 [==============================] - 6s 106ms/step - loss: 0.0452 - mae: 0.1417 - val_loss: 0.0511 - val_mae: 0.1513
Epoch 9/30
55/55 [==============================] - 6s 106ms/step - loss: 0.0448 - mae: 0.1412 - val_loss: 0.0509 - val_mae: 0.1502
Epoch 10/30
55/55 [==============================] - 6s 106ms/step - loss: 0.0445 - mae: 0.1407 - val_loss: 0.0511 - val_mae: 0.1507
Epoch 11/30
55/55 [==============================] - 6s 104ms/step - loss: 0.0443 - mae: 0.1404 - val_loss: 0.0510 - val_mae: 0.1503
Epoch 12/30
55/55 [==============================] - 6s 106ms/step - loss: 0.0441 - mae: 0.1401 - val_loss: 0.0513 - val_mae: 0.1510
Epoch 13/30
55/55 [==============================] - 6s 104ms/step - loss: 0.0440 - mae: 0.1399 - val_loss: 0.0512 - val_mae: 0.1506
Epoch 14/30
55/55 [==============================] - 6s 106ms/step - loss: 0.0439 - mae: 0.1397 - val_loss: 0.0514 - val_mae: 0.1512
Epoch 15/30
55/55 [==============================] - 6s 104ms/step - loss: 0.0438 - mae: 0.1396 - val_loss: 0.0516 - val_mae: 0.1517
Epoch 16/30
55/55 [==============================] - 6s 103ms/step - loss: 0.0437 - mae: 0.1395 - val_loss: 0.0515 - val_mae: 0.1513
Epoch 17/30
55/55 [==============================] - 6s 103ms/step - loss: 0.0436 - mae: 0.1394 - val_loss: 0.0516 - val_mae: 0.1515
Epoch 18/30
55/55 [==============================] - 6s 116ms/step - loss: 0.0436 - mae: 0.1393 - val_loss: 0.0516 - val_mae: 0.1514
Epoch 19/30
55/55 [==============================] - 6s 114ms/step - loss: 0.0436 - mae: 0.1393 - val_loss: 0.0516 - val_mae: 0.1514
Epoch 20/30
55/55 [==============================] - 6s 107ms/step - loss: 0.0435 - mae: 0.1392 - val_loss: 0.0516 - val_mae: 0.1515
Epoch 21/30
55/55 [==============================] - 6s 111ms/step - loss: 0.0435 - mae: 0.1392 - val_loss: 0.0516 - val_mae: 0.1515
Epoch 22/30
55/55 [==============================] - 6s 106ms/step - loss: 0.0435 - mae: 0.1392 - val_loss: 0.0517 - val_mae: 0.1517
Epoch 23/30
55/55 [==============================] - 6s 105ms/step - loss: 0.0435 - mae: 0.1392 - val_loss: 0.0516 - val_mae: 0.1514
Epoch 24/30
55/55 [==============================] - 6s 106ms/step - loss: 0.0434 - mae: 0.1391 - val_loss: 0.0517 - val_mae: 0.1515
Epoch 25/30
55/55 [==============================] - 6s 105ms/step - loss: 0.0434 - mae: 0.1391 - val_loss: 0.0517 - val_mae: 0.1516
Epoch 26/30
55/55 [==============================] - 6s 104ms/step - loss: 0.0434 - mae: 0.1391 - val_loss: 0.0517 - val_mae: 0.1516
Epoch 27/30
55/55 [==============================] - 6s 103ms/step - loss: 0.0434 - mae: 0.1391 - val_loss: 0.0517 - val_mae: 0.1516
Epoch 28/30
55/55 [==============================] - 6s 104ms/step - loss: 0.0434 - mae: 0.1391 - val_loss: 0.0517 - val_mae: 0.1516
Epoch 29/30
55/55 [==============================] - 6s 105ms/step - loss: 0.0434 - mae: 0.1391 - val_loss: 0.0517 - val_mae: 0.1516
Epoch 30/30
55/55 [==============================] - 7s 123ms/step - loss: 0.0434 - mae: 0.1391 - val_loss: 0.0517 - val_mae: 0.1516
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size


plt.plot(history_C3.history['loss'], label='train')
plt.plot(history_C3.history['val_loss'], label='val')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C3 = model.predict(trainX, verbose = 1)
print(testingtrain_C3[0], testingtrain_C3.shape)

testingtest_C3 = model.predict(testX, verbose = 1)
print(testingtest_C3[0], testingtest_C3.shape)
122/122 [==============================] - 4s 30ms/step
[[0.4165328  0.26134077 0.16218798 ... 0.32222295 0.20529076 0.23570502]
 [0.44314525 0.27548316 0.20323479 ... 0.35658538 0.22909158 0.24250588]
 [0.4215353  0.25365472 0.22213596 ... 0.33115828 0.23169443 0.22788656]
 ...
 [0.6381572  0.34179106 0.39972863 ... 0.8736123  0.37094027 0.18361095]
 [0.58946264 0.27104455 0.3632004  ... 0.90108794 0.26905382 0.15404096]
 [0.50223935 0.20844005 0.31731987 ... 0.8194674  0.17978592 0.11757931]] (3888, 48, 98)
27/27 [==============================] - 1s 30ms/step
[[0.29911676 0.22571677 0.14205949 ... 0.29959157 0.13497151 0.2446003 ]
 [0.31327355 0.2305078  0.17868865 ... 0.36311382 0.14372727 0.25848117]
 [0.26658237 0.215325   0.17758381 ... 0.3508407  0.13593444 0.24974903]
 ...
 [0.46932483 0.24314556 0.21848188 ... 0.34372932 0.08552504 0.17234513]
 [0.41515175 0.204807   0.14791855 ... 0.29965663 0.07565595 0.18148178]
 [0.32072315 0.16923326 0.12462603 ... 0.28963858 0.06641573 0.1829927 ]] (864, 48, 98)
In [ ]:
trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain_C3[:,1,:]))
print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C3[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))


trainMAE = np.mean(mae(trainY[:,1,:], testingtrain_C3[:,1,:]))
print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C3[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Train Score: 0.23 RMSE
Test Score: 0.30 RMSE
Train Score: 0.16 MAE
Test Score: 0.20 MAE
In [ ]:
plt.imshow(trainY[:47,47,:])            # actual
plt.show()
plt.imshow(testingtrain_C3[:47,47,:])   # prediction
plt.show()
In [ ]:
aa = np.arange(testY.shape[0])
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,:1], marker='.', label="actual")
plt.plot(aa, testingtest_C3[:,1,:1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
X_wkn_C4 = X_wkn_C4.drop(columns = 'cluster')
X_wkn_C4 = X_wkn_C4.transpose()
X_wkn_C4.head()
Out[ ]:
6 8 16 17 62 80 82 89 91 94 ... 212 220 228 255 256 276 279 293 299 300
Datetime
2012-07-01 00:00:00 0.260 0.208 0.355 0.122 0.168 0.044 0.137 0.047 0.106 0.511 ... 0.311 0.111 0.186 1.238 0.856 0.081 0.094 0.183 0.541 0.219
2012-07-01 00:30:00 0.253 0.151 0.508 0.121 0.175 0.038 0.133 0.073 0.077 0.504 ... 0.249 0.091 0.199 0.930 0.546 0.125 0.063 0.177 0.100 0.099
2012-07-01 01:00:00 0.180 0.092 0.555 0.147 0.188 0.056 0.114 0.047 0.073 0.473 ... 0.152 0.076 0.207 1.435 0.498 0.154 0.106 0.193 0.090 0.134
2012-07-01 01:30:00 0.220 0.152 0.542 0.141 0.344 0.025 0.159 0.089 0.082 0.513 ... 0.203 0.058 0.212 0.928 0.138 0.188 0.081 0.185 0.094 0.100
2012-07-01 02:00:00 0.171 0.083 0.571 0.219 0.172 0.050 0.143 0.065 0.079 0.491 ... 0.121 0.088 0.158 0.796 0.255 0.135 0.106 0.176 0.046 0.123

5 rows × 36 columns

In [ ]:
X_wkn_C4 = X_wkn_C4.values

# Scaling to the [0, 1] range (disabled)
# scaler = MinMaxScaler(feature_range=(0, 1))
# X_wkn_C4 = scaler.fit_transform(X_wkn_C4)

# Cap outliers at the 95th percentile
cap = np.percentile(X_wkn_C4, 95)
X_wkn_C4[X_wkn_C4 > cap] = cap
In [ ]:
# NOTE: .describe() exists only on the DataFrame, so this cell must have run
# before the .values conversion above (the uncapped maxima in the output confirm it)
X_wkn_C4.describe()
Out[ ]:
6 8 16 17 62 80 82 89 91 94 ... 212 220 228 255 256 276 279 293 299 300
count 5040.000000 5040.000000 5040.000000 5040.000000 5040.000000 5040.000000 5040.000000 5040.000000 5040.000000 5040.000000 ... 5040.000000 5040.000000 5040.000000 5040.000000 5040.000000 5040.000000 5040.000000 5040.000000 5040.000000 5040.000000
mean 0.495485 0.481222 0.478331 0.540786 0.570581 0.424836 0.609395 0.503584 0.594658 0.456992 ... 0.531082 0.436244 0.787936 0.804635 0.598701 0.555214 0.495364 0.608039 0.635793 0.726511
std 0.320435 0.565921 0.463363 0.543318 0.648753 0.473542 0.734790 0.422058 0.510328 0.406694 ... 0.504657 0.520312 0.608481 0.473255 0.585581 0.577130 0.541153 0.692991 0.667376 0.630710
min 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 ... 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
25% 0.266000 0.131000 0.155000 0.171000 0.181000 0.063000 0.179000 0.161000 0.157000 0.185000 ... 0.195000 0.078000 0.212000 0.455000 0.209000 0.115000 0.125000 0.201000 0.137000 0.216000
50% 0.390000 0.242500 0.320000 0.322000 0.279000 0.238000 0.343000 0.401000 0.504500 0.317000 ... 0.335000 0.217000 0.682500 0.646000 0.348000 0.276000 0.238000 0.374000 0.364000 0.478000
75% 0.628000 0.660250 0.650000 0.658000 0.701250 0.614500 0.602000 0.731000 0.919250 0.584000 ... 0.690000 0.632250 1.227000 1.027000 0.791500 0.836000 0.650000 0.796250 0.956250 1.099000
max 2.361000 4.840000 4.005000 4.256000 4.854000 2.650000 5.085000 3.226000 3.264000 3.287000 ... 4.452000 3.489000 3.803000 3.752000 4.352000 3.976000 2.956000 4.747000 3.986000 4.110000

8 rows × 36 columns

In [ ]:
training_size = int(X_wkn_C4.shape[0] * 0.80)
test_size = X_wkn_C4.shape[0] - training_size

train, test = X_wkn_C4[:training_size], X_wkn_C4[training_size:]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)
In [ ]:
print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)
(3888, 48, 36) (3888, 48, 36)
(864, 48, 36) (864, 48, 36)
In [ ]:
Model_4 = models.Sequential()
Model_4.add(layers.Dense(50, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_4.add(Dropout(0.2))
Model_4.add(BatchNormalization())

Model_4.add(layers.Dense(50, activation='relu'))
Model_4.add(Dropout(0.2))
Model_4.add(BatchNormalization())

Model_4.add(Dense(trainX.shape[2]))
Model_4.compile(optimizer=optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_4.summary()
Model: "sequential_36"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_60 (Dense)             (None, 48, 50)            1850      
_________________________________________________________________
dropout_34 (Dropout)         (None, 48, 50)            0         
_________________________________________________________________
batch_normalization_24 (Batc (None, 48, 50)            200       
_________________________________________________________________
dense_61 (Dense)             (None, 48, 50)            2550      
_________________________________________________________________
dropout_35 (Dropout)         (None, 48, 50)            0         
_________________________________________________________________
batch_normalization_25 (Batc (None, 48, 50)            200       
_________________________________________________________________
dense_62 (Dense)             (None, 48, 36)            1836      
=================================================================
Total params: 6,636
Trainable params: 6,436
Non-trainable params: 200
_________________________________________________________________
In [ ]:
model_train = Model_4.fit(trainX,trainY, epochs=30, batch_size = 32, validation_split = 0.10)
Epoch 1/30
110/110 [==============================] - 1s 7ms/step - loss: 0.8398 - mae: 0.6885 - val_loss: 0.1852 - val_mae: 0.2919
Epoch 2/30
110/110 [==============================] - 1s 5ms/step - loss: 0.2981 - mae: 0.4036 - val_loss: 0.1217 - val_mae: 0.2526
Epoch 3/30
110/110 [==============================] - 1s 5ms/step - loss: 0.2040 - mae: 0.3327 - val_loss: 0.1163 - val_mae: 0.2462
Epoch 4/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1736 - mae: 0.3065 - val_loss: 0.1140 - val_mae: 0.2425
Epoch 5/30
110/110 [==============================] - 1s 5ms/step - loss: 0.1592 - mae: 0.2934 - val_loss: 0.1123 - val_mae: 0.2417
Epoch 6/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1515 - mae: 0.2860 - val_loss: 0.1115 - val_mae: 0.2404
Epoch 7/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1466 - mae: 0.2810 - val_loss: 0.1114 - val_mae: 0.2432
Epoch 8/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1436 - mae: 0.2783 - val_loss: 0.1109 - val_mae: 0.2413
Epoch 9/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1411 - mae: 0.2756 - val_loss: 0.1100 - val_mae: 0.2389
Epoch 10/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1396 - mae: 0.2742 - val_loss: 0.1097 - val_mae: 0.2371
Epoch 11/30
110/110 [==============================] - 1s 5ms/step - loss: 0.1381 - mae: 0.2727 - val_loss: 0.1101 - val_mae: 0.2381
Epoch 12/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1369 - mae: 0.2715 - val_loss: 0.1111 - val_mae: 0.2417
Epoch 13/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1355 - mae: 0.2701 - val_loss: 0.1100 - val_mae: 0.2375
Epoch 14/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1346 - mae: 0.2690 - val_loss: 0.1120 - val_mae: 0.2451
Epoch 15/30
110/110 [==============================] - 1s 7ms/step - loss: 0.1341 - mae: 0.2692 - val_loss: 0.1097 - val_mae: 0.2334
Epoch 16/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1331 - mae: 0.2678 - val_loss: 0.1102 - val_mae: 0.2385
Epoch 17/30
110/110 [==============================] - 1s 7ms/step - loss: 0.1323 - mae: 0.2670 - val_loss: 0.1101 - val_mae: 0.2371
Epoch 18/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1317 - mae: 0.2663 - val_loss: 0.1100 - val_mae: 0.2360
Epoch 19/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1307 - mae: 0.2651 - val_loss: 0.1106 - val_mae: 0.2390
Epoch 20/30
110/110 [==============================] - 1s 7ms/step - loss: 0.1302 - mae: 0.2649 - val_loss: 0.1109 - val_mae: 0.2405
Epoch 21/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1297 - mae: 0.2641 - val_loss: 0.1101 - val_mae: 0.2381
Epoch 22/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1289 - mae: 0.2635 - val_loss: 0.1094 - val_mae: 0.2332
Epoch 23/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1283 - mae: 0.2626 - val_loss: 0.1093 - val_mae: 0.2347
Epoch 24/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1280 - mae: 0.2624 - val_loss: 0.1090 - val_mae: 0.2336
Epoch 25/30
110/110 [==============================] - 1s 7ms/step - loss: 0.1273 - mae: 0.2616 - val_loss: 0.1090 - val_mae: 0.2336
Epoch 26/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1277 - mae: 0.2617 - val_loss: 0.1103 - val_mae: 0.2408
Epoch 27/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1269 - mae: 0.2611 - val_loss: 0.1105 - val_mae: 0.2382
Epoch 28/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1262 - mae: 0.2603 - val_loss: 0.1086 - val_mae: 0.2309
Epoch 29/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1258 - mae: 0.2597 - val_loss: 0.1087 - val_mae: 0.2337
Epoch 30/30
110/110 [==============================] - 1s 6ms/step - loss: 0.1252 - mae: 0.2594 - val_loss: 0.1084 - val_mae: 0.2303
In [ ]:
Seq_train = Model_4.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_4.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
122/122 [==============================] - 0s 2ms/step
[[0.43795007 0.14972118 0.4918708  ... 0.3379226  0.35087776 0.6249676 ]
 [0.43309844 0.13814421 0.5254885  ... 0.3382992  0.30491182 0.47205478]
 [0.44505003 0.09715711 0.49507958 ... 0.36617142 0.23908082 0.23652682]
 ...
 [0.5695745  0.46330225 0.56792974 ... 1.0476954  0.77623576 0.664128  ]
 [0.5307566  0.2395247  0.5724336  ... 0.8103596  0.5556046  0.8257611 ]
 [0.51040876 0.27951068 0.72884184 ... 0.64879084 0.65024    1.1493033 ]] (3888, 48, 36)
27/27 [==============================] - 0s 2ms/step
[[0.29419073 0.12821281 0.31839406 ... 0.22707298 0.24744287 0.47324124]
 [0.33206335 0.14994171 0.3194281  ... 0.26144442 0.24098282 0.4214878 ]
 [0.263421   0.10036258 0.18672106 ... 0.40599075 0.11013892 0.19028184]
 ...
 [0.31142864 0.2044548  0.2358785  ... 0.06337065 0.7076225  0.67236847]
 [0.2958635  0.1973117  0.30219185 ... 0.11017536 0.37655485 0.6048691 ]
 [0.33036485 0.21173954 0.37249976 ... 0.14252041 0.39558625 0.6177683 ]] (864, 48, 36)
In [ ]:
# Error helpers; note that earlier metric cells call mae(), so this cell must run first
def rmse(actual, pred):
    return np.sqrt(((pred - actual) ** 2).mean())

def mae(actual, pred):
    return np.mean(np.abs(actual - pred))
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))



testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.44 RMSE
Test Score: 0.33 MAE
In [ ]:
plt.imshow(testY[:47,47,:])      # actual
plt.show()
plt.imshow(Seq_test[:47,47,:])   # prediction
plt.show()
In [ ]:
aa = np.arange(testY.shape[0])
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,:1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,1,:1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()
model.add(LSTM(50, activation='relu', input_shape=(trainX.shape[1], trainX.shape[2]), return_sequences=True))

# Variants tried: Dropout and a second LSTM layer
# model.add(Dropout(0.5))
# model.add(LSTM(50, activation='relu', return_sequences=True))
# model.add(Dropout(0.01))

model.add(Dense(trainX.shape[2]))

model.compile(optimizer=optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
model.summary()
Model: "sequential_38"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_25 (LSTM)               (None, 48, 50)            17400     
_________________________________________________________________
dense_64 (Dense)             (None, 48, 36)            1836      
=================================================================
Total params: 19,236
Trainable params: 19,236
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C4 = model.fit(trainX,trainY, epochs=30, batch_size = 32,
                       validation_split = 0.10, callbacks=[lr_decay])
Epoch 1/30
110/110 [==============================] - 3s 25ms/step - loss: 0.2433 - mae: 0.3559 - val_loss: 0.1319 - val_mae: 0.2682
Epoch 2/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1536 - mae: 0.2936 - val_loss: 0.1196 - val_mae: 0.2541
Epoch 3/30
110/110 [==============================] - 2s 18ms/step - loss: 0.1417 - mae: 0.2787 - val_loss: 0.1166 - val_mae: 0.2503
Epoch 4/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1356 - mae: 0.2712 - val_loss: 0.1163 - val_mae: 0.2502
Epoch 5/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1322 - mae: 0.2672 - val_loss: 0.1142 - val_mae: 0.2467
Epoch 6/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1292 - mae: 0.2638 - val_loss: 0.1150 - val_mae: 0.2477
Epoch 7/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1272 - mae: 0.2616 - val_loss: 0.1143 - val_mae: 0.2459
Epoch 8/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1257 - mae: 0.2597 - val_loss: 0.1144 - val_mae: 0.2471
Epoch 9/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1246 - mae: 0.2586 - val_loss: 0.1137 - val_mae: 0.2449
Epoch 10/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1236 - mae: 0.2573 - val_loss: 0.1137 - val_mae: 0.2453
Epoch 11/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1229 - mae: 0.2565 - val_loss: 0.1134 - val_mae: 0.2444
Epoch 12/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1224 - mae: 0.2558 - val_loss: 0.1133 - val_mae: 0.2443
Epoch 13/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1219 - mae: 0.2552 - val_loss: 0.1131 - val_mae: 0.2437
Epoch 14/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1216 - mae: 0.2548 - val_loss: 0.1131 - val_mae: 0.2437
Epoch 15/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1213 - mae: 0.2544 - val_loss: 0.1131 - val_mae: 0.2441
Epoch 16/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1211 - mae: 0.2543 - val_loss: 0.1129 - val_mae: 0.2435
Epoch 17/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1209 - mae: 0.2540 - val_loss: 0.1129 - val_mae: 0.2435
Epoch 18/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1208 - mae: 0.2538 - val_loss: 0.1129 - val_mae: 0.2434
Epoch 19/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1206 - mae: 0.2537 - val_loss: 0.1129 - val_mae: 0.2434
Epoch 20/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1205 - mae: 0.2535 - val_loss: 0.1130 - val_mae: 0.2439
Epoch 21/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1205 - mae: 0.2535 - val_loss: 0.1129 - val_mae: 0.2436
Epoch 22/30
110/110 [==============================] - 2s 20ms/step - loss: 0.1204 - mae: 0.2534 - val_loss: 0.1128 - val_mae: 0.2432
Epoch 23/30
110/110 [==============================] - 2s 22ms/step - loss: 0.1204 - mae: 0.2533 - val_loss: 0.1128 - val_mae: 0.2434
Epoch 24/30
110/110 [==============================] - 2s 18ms/step - loss: 0.1203 - mae: 0.2533 - val_loss: 0.1129 - val_mae: 0.2435
Epoch 25/30
110/110 [==============================] - 2s 22ms/step - loss: 0.1203 - mae: 0.2533 - val_loss: 0.1128 - val_mae: 0.2432
Epoch 26/30
110/110 [==============================] - 3s 23ms/step - loss: 0.1203 - mae: 0.2532 - val_loss: 0.1129 - val_mae: 0.2434
Epoch 27/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1202 - mae: 0.2532 - val_loss: 0.1128 - val_mae: 0.2435
Epoch 28/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1202 - mae: 0.2532 - val_loss: 0.1128 - val_mae: 0.2433
Epoch 29/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1202 - mae: 0.2531 - val_loss: 0.1128 - val_mae: 0.2433
Epoch 30/30
110/110 [==============================] - 2s 19ms/step - loss: 0.1202 - mae: 0.2531 - val_loss: 0.1128 - val_mae: 0.2433
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C4.history['loss'], label='train')
plt.plot(history_C4.history['val_loss'], label='val')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C4 = model.predict(trainX, verbose = 1)
print(testingtrain_C4[0], testingtrain_C4.shape)

testingtest_C4 = model.predict(testX, verbose = 1)
print(testingtest_C4[0], testingtest_C4.shape)
122/122 [==============================] - 1s 5ms/step
[[0.3848795  0.2123671  0.39055553 ... 0.4081918  0.2857102  0.4150855 ]
 [0.44634297 0.19115943 0.52749884 ... 0.3378146  0.2470568  0.5144469 ]
 [0.4277559  0.19680579 0.5630783  ... 0.30746228 0.2514426  0.47818524]
 ...
 [0.61935127 0.6485281  0.6756485  ... 1.0886306  0.9805956  0.39378655]
 [0.42761838 0.2699303  0.58826053 ... 0.8780795  0.7238216  0.52412546]
 [0.29021543 0.02060968 0.48438102 ... 0.6583081  0.41486108 0.63905144]] (3888, 48, 36)
27/27 [==============================] - 0s 5ms/step
[[0.29956168 0.13982418 0.3077374  ... 0.29258436 0.2845307  0.42427665]
 [0.35098362 0.18309878 0.4142684  ... 0.27275544 0.28013703 0.5015626 ]
 [0.28984424 0.1802613  0.34279567 ... 0.2216315  0.20783688 0.3866002 ]
 ...
 [0.37952545 0.4260361  0.24325594 ... 0.45361817 0.40000212 0.5888409 ]
 [0.34146738 0.26353025 0.2521727  ... 0.28506064 0.2505158  0.58302104]
 [0.2492327  0.21876146 0.28446582 ... 0.21858338 0.1429754  0.5311775 ]] (864, 48, 36)
In [ ]:
testMAE = np.mean(mae(testY[:,1,:], testingtest_C4[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))

testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C4[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))
Test Score: 0.33 MAE
Test Score: 0.43 RMSE
In [ ]:
plt.imshow(testY[:47,1,:])             # actual
plt.show()
plt.imshow(testingtest_C4[:47,1,:])    # prediction
plt.show()
In [ ]:
aa = np.arange(testY.shape[0])
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,:1], marker='.', label="actual")
plt.plot(aa, testingtest_C4[:,1,:1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()

Weekday data with no clusters

In [ ]:
# No cluster column to drop here: this section uses all weekday meters together
X_wk = X_wk.transpose()
X_wk.head()
Out[ ]:
1 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 ... 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300
Datetime
2012-07-02 00:00:00 0.309 1.044 0.094 0.058 0.152 0.079 0.079 0.026 0.753 0.291 0.176 0.160 0.190 0.061 0.329 0.139 0.064 0.038 2.350 0.185 0.068 0.056 0.137 0.150 0.838 0.847 0.138 0.100 0.068 0.275 0.056 0.291 0.250 0.340 0.120 0.048 0.156 0.124 0.107 0.138 ... 0.105 0.177 0.953 0.038 0.075 0.006 1.306 0.044 0.875 0.163 1.257 0.160 0.103 0.446 0.138 0.094 0.079 0.130 0.069 0.252 1.297 0.319 0.079 0.679 0.085 0.170 0.950 0.053 0.741 0.067 0.428 0.031 0.155 0.122 0.099 0.036 0.158 0.146 0.098 0.888
2012-07-02 00:30:00 0.082 0.992 0.091 0.093 0.392 0.044 0.135 0.022 0.350 0.318 0.323 0.159 0.206 0.053 0.345 0.160 0.076 0.069 2.300 0.168 0.070 0.104 0.126 0.063 0.838 0.506 0.125 0.098 0.022 0.294 0.063 0.286 0.156 0.239 0.041 0.191 0.597 0.103 0.060 0.152 ... 0.079 0.177 0.725 0.027 0.100 0.013 1.328 0.087 0.989 0.159 1.094 0.145 0.108 0.273 0.150 0.069 0.095 0.202 0.106 0.543 0.648 0.250 0.046 0.377 0.080 0.154 0.925 0.049 0.734 0.090 0.212 0.081 0.219 0.209 0.091 0.042 0.223 0.117 0.043 0.553
2012-07-02 01:00:00 0.059 0.448 0.087 0.065 0.329 0.078 0.078 0.012 0.052 0.304 0.223 0.131 0.200 0.064 0.366 0.121 0.075 0.063 2.325 0.162 0.066 0.059 0.127 0.050 0.863 0.265 0.131 0.050 0.066 0.291 0.064 0.285 0.113 0.218 0.071 0.151 0.594 0.117 0.103 0.149 ... 0.093 0.176 0.724 0.039 0.069 0.037 1.170 0.020 0.713 0.153 0.955 0.131 0.050 0.474 0.150 0.101 0.096 0.122 0.081 0.446 0.671 0.188 0.055 0.178 0.070 0.123 0.875 0.044 0.744 0.076 0.263 0.025 0.201 0.139 0.084 0.019 0.104 0.073 0.094 0.371
2012-07-02 01:30:00 0.097 0.071 0.080 0.081 0.318 0.033 0.137 0.036 0.016 0.315 0.271 0.158 0.183 0.065 0.347 0.121 0.059 0.050 1.925 0.182 0.059 0.048 0.133 0.088 0.850 0.249 0.100 0.064 0.030 0.265 0.063 0.297 0.131 0.235 0.060 0.201 0.569 0.106 0.063 0.146 ... 0.087 0.183 0.619 0.034 0.088 0.000 1.168 0.030 0.614 0.151 0.749 0.149 0.083 0.528 0.194 0.086 0.076 0.079 0.100 0.150 0.628 0.200 0.153 0.215 0.080 0.183 0.913 0.012 0.216 0.086 0.131 0.156 0.173 0.127 0.070 0.052 0.154 0.064 0.051 0.222
2012-07-02 02:00:00 0.290 0.069 0.093 0.079 0.312 0.075 0.081 0.012 0.046 0.334 0.134 0.160 0.214 0.044 0.329 0.149 0.061 0.063 0.113 0.177 0.142 0.078 0.129 0.050 0.850 0.286 0.088 0.082 0.059 0.310 0.064 0.279 0.131 0.257 0.036 0.173 0.050 0.109 0.095 0.160 ... 0.082 0.188 0.679 0.129 0.069 0.006 1.186 0.081 0.675 0.158 0.703 0.126 0.059 0.531 0.138 0.082 0.091 0.037 0.069 0.149 0.186 0.150 0.079 0.152 0.068 0.137 0.888 0.041 0.147 0.083 0.152 0.363 0.209 0.142 0.100 0.036 0.085 0.067 0.087 0.099

5 rows × 299 columns

In [ ]:
X_wk = X_wk.values

# Scaling to the [0, 1] range (disabled)
# scaler = MinMaxScaler(feature_range=(0, 1))
# X_wk = scaler.fit_transform(X_wk)

# Outlier capping (disabled for the unclustered weekday data)
# cap = np.percentile(X_wk, 97)
# X_wk[X_wk > cap] = cap
In [ ]:
training_size = int(X_wk.shape[0] * 0.80)
test_size = X_wk.shape[0] - training_size

train, test = X_wk[:training_size], X_wk[training_size:]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)

print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)
(9840, 48, 299) (9840, 48, 299)
(2352, 48, 299) (2352, 48, 299)
In [ ]:
# Build a sequential (dense) network
Model_wk = models.Sequential()
Model_wk.add(layers.Dense(400, activation='relu', input_shape=(trainX.shape[1], trainX.shape[2])))
Model_wk.add(Dropout(.01))
Model_wk.add(BatchNormalization())

Model_wk.add(Dense(200))
Model_wk.add(Dropout(.01))
Model_wk.add(BatchNormalization())

Model_wk.add(Dense(trainX.shape[2]))
Model_wk.compile(optimizer=optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_wk.summary()
Model: "sequential_9"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_19 (Dense)             (None, 48, 400)           120000    
_________________________________________________________________
dropout_12 (Dropout)         (None, 48, 400)           0         
_________________________________________________________________
batch_normalization_10 (Batc (None, 48, 400)           1600      
_________________________________________________________________
dense_20 (Dense)             (None, 48, 200)           80200     
_________________________________________________________________
dropout_13 (Dropout)         (None, 48, 200)           0         
_________________________________________________________________
batch_normalization_11 (Batc (None, 48, 200)           800       
_________________________________________________________________
dense_21 (Dense)             (None, 48, 299)           60099     
=================================================================
Total params: 262,699
Trainable params: 261,499
Non-trainable params: 1,200
_________________________________________________________________
In [ ]:
model_train = Model_wk.fit(trainX,trainY, epochs=30, validation_split = 0.10, batch_size=64)
Epoch 1/30
139/139 [==============================] - 18s 131ms/step - loss: 0.1772 - mae: 0.2580 - val_loss: 0.0641 - val_mae: 0.1565
Epoch 2/30
139/139 [==============================] - 18s 130ms/step - loss: 0.0867 - mae: 0.1766 - val_loss: 0.0599 - val_mae: 0.1404
Epoch 3/30
139/139 [==============================] - 18s 128ms/step - loss: 0.0801 - mae: 0.1695 - val_loss: 0.0603 - val_mae: 0.1373
Epoch 4/30
139/139 [==============================] - 18s 129ms/step - loss: 0.0759 - mae: 0.1649 - val_loss: 0.0591 - val_mae: 0.1380
Epoch 5/30
139/139 [==============================] - 18s 129ms/step - loss: 0.0726 - mae: 0.1616 - val_loss: 0.0594 - val_mae: 0.1377
Epoch 6/30
139/139 [==============================] - 18s 128ms/step - loss: 0.0697 - mae: 0.1585 - val_loss: 0.0597 - val_mae: 0.1368
Epoch 7/30
139/139 [==============================] - 18s 128ms/step - loss: 0.0673 - mae: 0.1562 - val_loss: 0.0602 - val_mae: 0.1385
Epoch 8/30
139/139 [==============================] - 18s 128ms/step - loss: 0.0650 - mae: 0.1538 - val_loss: 0.0605 - val_mae: 0.1408
Epoch 9/30
139/139 [==============================] - 18s 129ms/step - loss: 0.0630 - mae: 0.1519 - val_loss: 0.0615 - val_mae: 0.1390
Epoch 10/30
139/139 [==============================] - 18s 128ms/step - loss: 0.0611 - mae: 0.1501 - val_loss: 0.0619 - val_mae: 0.1426
Epoch 11/30
139/139 [==============================] - 18s 128ms/step - loss: 0.0592 - mae: 0.1480 - val_loss: 0.0632 - val_mae: 0.1430
Epoch 12/30
139/139 [==============================] - 18s 128ms/step - loss: 0.0577 - mae: 0.1467 - val_loss: 0.0636 - val_mae: 0.1421
Epoch 13/30
139/139 [==============================] - 18s 127ms/step - loss: 0.0562 - mae: 0.1453 - val_loss: 0.0643 - val_mae: 0.1433
Epoch 14/30
139/139 [==============================] - 18s 128ms/step - loss: 0.0548 - mae: 0.1440 - val_loss: 0.0652 - val_mae: 0.1489
Epoch 15/30
139/139 [==============================] - 18s 128ms/step - loss: 0.0536 - mae: 0.1428 - val_loss: 0.0665 - val_mae: 0.1483
Epoch 16/30
139/139 [==============================] - 18s 132ms/step - loss: 0.0526 - mae: 0.1418 - val_loss: 0.0663 - val_mae: 0.1481
Epoch 17/30
139/139 [==============================] - 18s 127ms/step - loss: 0.0515 - mae: 0.1406 - val_loss: 0.0665 - val_mae: 0.1467
Epoch 18/30
139/139 [==============================] - 18s 128ms/step - loss: 0.0505 - mae: 0.1396 - val_loss: 0.0669 - val_mae: 0.1487
Epoch 19/30
139/139 [==============================] - 18s 132ms/step - loss: 0.0498 - mae: 0.1390 - val_loss: 0.0680 - val_mae: 0.1506
Epoch 20/30
139/139 [==============================] - 18s 127ms/step - loss: 0.0491 - mae: 0.1379 - val_loss: 0.0687 - val_mae: 0.1489
Epoch 21/30
139/139 [==============================] - 18s 128ms/step - loss: 0.0485 - mae: 0.1376 - val_loss: 0.0684 - val_mae: 0.1508
Epoch 22/30
139/139 [==============================] - 17s 125ms/step - loss: 0.0480 - mae: 0.1369 - val_loss: 0.0688 - val_mae: 0.1520
Epoch 23/30
139/139 [==============================] - 17s 124ms/step - loss: 0.0475 - mae: 0.1365 - val_loss: 0.0695 - val_mae: 0.1509
Epoch 24/30
139/139 [==============================] - 17s 124ms/step - loss: 0.0470 - mae: 0.1358 - val_loss: 0.0696 - val_mae: 0.1523
Epoch 25/30
139/139 [==============================] - 18s 128ms/step - loss: 0.0466 - mae: 0.1353 - val_loss: 0.0712 - val_mae: 0.1533
Epoch 26/30
139/139 [==============================] - 17s 126ms/step - loss: 0.0463 - mae: 0.1350 - val_loss: 0.0705 - val_mae: 0.1545
Epoch 27/30
139/139 [==============================] - 17s 125ms/step - loss: 0.0463 - mae: 0.1351 - val_loss: 0.0717 - val_mae: 0.1531
Epoch 28/30
139/139 [==============================] - 17s 125ms/step - loss: 0.0458 - mae: 0.1344 - val_loss: 0.0733 - val_mae: 0.1555
Epoch 29/30
139/139 [==============================] - 18s 126ms/step - loss: 0.0457 - mae: 0.1344 - val_loss: 0.0725 - val_mae: 0.1555
Epoch 30/30
139/139 [==============================] - 17s 125ms/step - loss: 0.0453 - mae: 0.1340 - val_loss: 0.0722 - val_mae: 0.1538
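
In this run val_loss bottoms out near epoch 4 (0.0591) and drifts up to 0.0722 while the training loss keeps falling, a typical overfitting signature. One standard remedy, not used in this notebook, is early stopping on the validation loss:

In [ ]:
# Hypothetical alternative: stop once val_loss stops improving and keep the best weights
early_stop = callbacks.EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)
# model_train = Model_wk.fit(trainX, trainY, epochs=30, validation_split=0.10,
#                            batch_size=64, callbacks=[early_stop])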
In [ ]:
Seq_train = Model_wk.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_wk.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
308/308 [==============================] - 6s 18ms/step
[[ 0.5680974   0.22906427  0.24173445 ...  0.1694634  -0.19995768
   0.3710842 ]
 [ 0.26325375  0.19534555  0.13244115 ...  0.14202692  0.22902128
   0.26475984]
 [ 0.18640262  0.18718672  0.11259292 ...  0.1339849   0.36373574
   0.30689085]
 ...
 [ 0.83652353  0.43373364  0.03587861 ...  0.29398027  0.67156446
   0.30816865]
 [ 0.46721774  0.2607888   0.09700938 ...  0.28361267  0.59912133
   1.171351  ]
 [ 0.08863787  0.2828458   0.05963093 ...  0.22277379  0.47789675
   1.302546  ]] (9840, 48, 299)
74/74 [==============================] - 1s 17ms/step
[[ 0.11155239  0.07423376  0.10801794 ...  0.10112432  0.37651408
   0.74474376]
 [ 0.36377382  0.02054577  0.10788167 ...  0.10742366  0.14498517
   0.21365264]
 [ 0.3799943   0.07963139  0.12881224 ...  0.09890988  0.44172013
   0.1372038 ]
 ...
 [ 0.25654334  0.06773252  0.22966935 ...  0.17747864  0.03805881
   0.639286  ]
 [ 0.3249678   0.06299691  0.13358676 ...  0.11430921  0.38128245
   1.1755569 ]
 [ 0.1084296  -0.02035036  0.14966337 ...  0.12930617  0.4550054
   1.1323829 ]] (2352, 48, 299)
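The `mae` helper used in the evaluation cells below is defined earlier in the notebook. A minimal sketch, assuming it returns element-wise absolute errors that are then averaged with `np.mean`:

In [ ]:
# Sketch of the assumed mae helper: element-wise absolute error,
# consistent with the surrounding np.mean(mae(...)) calls
def mae(y_true, y_pred):
    return np.abs(np.asarray(y_true) - np.asarray(y_pred))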
In [ ]:
# Train-set scores (disabled here):
#trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], Seq_train[:,1,:]))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:,1,:], Seq_train[:,1,:]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.39 RMSE
Test Score: 0.23 MAE
In [ ]:
aa = np.arange(testY.shape[0])
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()
model.add(LSTM(200, activation='relu', input_shape=(trainX.shape[1], trainX.shape[2]), return_sequences=True))

# Deeper variants tried during experimentation, left disabled:
#model.add(Dropout(.2))
#model.add(LSTM(100, activation='relu', return_sequences=True))
#model.add(Dropout(.2))
#model.add(LSTM(50, activation='relu', return_sequences=True))
#model.add(Dropout(.01))

model.add(Dense(trainX.shape[2]))
# Alternatives tried: optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9), optimizers.Adam(lr=0.01)
model.compile(optimizer=optimizers.Adam(lr=0.001), metrics=['mae'], loss='mse')
model.summary()
Model: "sequential_10"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_4 (LSTM)                (None, 48, 200)           400000    
_________________________________________________________________
dense_22 (Dense)             (None, 48, 299)           60099     
=================================================================
Total params: 460,099
Trainable params: 460,099
Non-trainable params: 0
_________________________________________________________________
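The parameter counts in the summary can be checked by hand: a Keras LSTM layer holds 4·units·(input_dim + units + 1) weights (four gates, each with input, recurrent, and bias terms), and the Dense head holds units·outputs + outputs.

In [ ]:
# Sanity check of the parameter counts reported by model.summary()
units, n_series = 200, trainX.shape[2]              # 200 LSTM units, 299 meters
lstm_params = 4 * units * (n_series + units + 1)    # -> 400000
dense_params = units * n_series + n_series          # -> 60099
print(lstm_params, dense_params)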
In [ ]:
lr_decay = callbacks.LearningRateScheduler(schedule=lambda epoch: 0.001 * (0.80 ** epoch))
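The scheduler multiplies the learning rate by 0.8 every epoch, so it decays geometrically from 1e-3 to roughly 1.4e-5 by epoch 19:

In [ ]:
# Learning rates the scheduler hands to the optimizer: lr_e = 0.001 * 0.8**e
for epoch in [0, 1, 5, 10, 19]:
    print(epoch, 0.001 * 0.80 ** epoch)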
In [ ]:
# fit model
history_wk = model.fit(trainX, trainY, epochs=20,
                       validation_split=0.10,
                       batch_size=64,
                       callbacks=[lr_decay])
Epoch 1/20
139/139 [==============================] - 30s 217ms/step - loss: 0.1042 - mae: 0.1870 - val_loss: 0.0626 - val_mae: 0.1477
Epoch 2/20
139/139 [==============================] - 30s 216ms/step - loss: 0.0791 - mae: 0.1610 - val_loss: 0.0591 - val_mae: 0.1383
Epoch 3/20
139/139 [==============================] - 30s 215ms/step - loss: 0.0721 - mae: 0.1539 - val_loss: 0.0579 - val_mae: 0.1362
Epoch 4/20
139/139 [==============================] - 30s 214ms/step - loss: 0.0686 - mae: 0.1504 - val_loss: 0.0575 - val_mae: 0.1367
Epoch 5/20
139/139 [==============================] - 30s 217ms/step - loss: 0.0663 - mae: 0.1482 - val_loss: 0.0574 - val_mae: 0.1358
Epoch 6/20
139/139 [==============================] - 30s 216ms/step - loss: 0.0645 - mae: 0.1466 - val_loss: 0.0574 - val_mae: 0.1344
Epoch 7/20
139/139 [==============================] - 30s 214ms/step - loss: 0.0632 - mae: 0.1453 - val_loss: 0.0574 - val_mae: 0.1350
Epoch 8/20
139/139 [==============================] - 30s 215ms/step - loss: 0.0621 - mae: 0.1443 - val_loss: 0.0572 - val_mae: 0.1344
Epoch 9/20
139/139 [==============================] - 30s 214ms/step - loss: 0.0612 - mae: 0.1436 - val_loss: 0.0573 - val_mae: 0.1341
Epoch 10/20
139/139 [==============================] - 30s 214ms/step - loss: 0.0605 - mae: 0.1429 - val_loss: 0.0573 - val_mae: 0.1344
Epoch 11/20
139/139 [==============================] - 30s 215ms/step - loss: 0.0600 - mae: 0.1424 - val_loss: 0.0573 - val_mae: 0.1344
Epoch 12/20
139/139 [==============================] - 30s 212ms/step - loss: 0.0596 - mae: 0.1421 - val_loss: 0.0574 - val_mae: 0.1344
Epoch 13/20
139/139 [==============================] - 30s 213ms/step - loss: 0.0593 - mae: 0.1418 - val_loss: 0.0573 - val_mae: 0.1344
Epoch 14/20
139/139 [==============================] - 30s 217ms/step - loss: 0.0590 - mae: 0.1415 - val_loss: 0.0573 - val_mae: 0.1347
Epoch 15/20
139/139 [==============================] - 30s 216ms/step - loss: 0.0588 - mae: 0.1413 - val_loss: 0.0573 - val_mae: 0.1345
Epoch 16/20
139/139 [==============================] - 30s 214ms/step - loss: 0.0586 - mae: 0.1412 - val_loss: 0.0574 - val_mae: 0.1346
Epoch 17/20
139/139 [==============================] - 30s 214ms/step - loss: 0.0585 - mae: 0.1411 - val_loss: 0.0573 - val_mae: 0.1344
Epoch 18/20
139/139 [==============================] - 30s 216ms/step - loss: 0.0584 - mae: 0.1409 - val_loss: 0.0574 - val_mae: 0.1346
Epoch 19/20
139/139 [==============================] - 30s 215ms/step - loss: 0.0583 - mae: 0.1409 - val_loss: 0.0574 - val_mae: 0.1347
Epoch 20/20
139/139 [==============================] - 30s 213ms/step - loss: 0.0582 - mae: 0.1408 - val_loss: 0.0574 - val_mae: 0.1347
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_wk.history['loss'], label='train')
plt.plot(history_wk.history['val_loss'], label='val')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_wk = model.predict(trainX, verbose = 1)
print(testingtrain_wk[0], testingtrain_wk.shape)

testingtest_wk = model.predict(testX, verbose = 1)
print(testingtest_wk[0], testingtest_wk.shape)
308/308 [==============================] - 16s 51ms/step
[[0.47961098 0.13011347 0.11589101 ... 0.12432559 0.29668126 0.4998193 ]
 [0.45162892 0.1109938  0.11774118 ... 0.10638232 0.27146897 0.3501279 ]
 [0.45393518 0.08116338 0.1114116  ... 0.07496182 0.26545352 0.12149573]
 ...
 [0.69024014 0.52852285 0.1040387  ... 0.23295702 0.50764775 0.7576387 ]
 [0.5459399  0.41978943 0.12104275 ... 0.19322596 0.26929396 1.1226845 ]
 [0.39830205 0.3334907  0.10658635 ... 0.17163858 0.23134933 1.1309266 ]] (9840, 48, 299)
74/74 [==============================] - 4s 51ms/step
[[0.29926258 0.08950295 0.12299322 ... 0.06618792 0.26034614 0.5481621 ]
 [0.35576284 0.10884197 0.12924509 ... 0.07047172 0.27822503 0.46055317]
 [0.33964145 0.08616377 0.1061127  ... 0.09274643 0.27680224 0.3190781 ]
 ...
 [0.40382695 0.11825089 0.1644677  ... 0.19082509 0.438848   0.8736755 ]
 [0.32373226 0.10909735 0.1288597  ... 0.17343399 0.26121038 1.2301989 ]
 [0.10078562 0.03189413 0.11073171 ... 0.17143457 0.18352324 1.1783121 ]] (2352, 48, 299)
In [ ]:
trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain_wk[:,1,:]))
print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_wk[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

trainMAE = np.mean(mae(trainY[:,1,:], testingtrain_wk[:,1,:]))
print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_wk[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Train Score: 0.26 RMSE
Test Score: 0.33 RMSE
Train Score: 0.15 MAE
Test Score: 0.20 MAE
In [ ]:
aa = np.arange(testY.shape[0])
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,:1], marker='.', label="actual")
plt.plot(aa, testingtest_wk[:,1,:1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()

Weekend Data with no clusters

In [ ]:
X_wkn = X_wkn.transpose()
X_wkn.head()
Out[ ]:
1 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 ... 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300
Datetime
2012-07-01 00:00:00 0.855 0.965 0.084 0.108 0.260 0.087 0.208 0.025 0.020 0.567 0.241 0.234 0.191 0.066 0.355 0.122 0.184 0.075 0.050 0.187 0.067 0.043 0.130 0.038 0.863 0.612 0.106 0.068 0.045 1.127 0.060 0.668 0.269 0.277 0.183 0.028 0.032 0.101 0.071 0.138 ... 0.584 0.490 0.783 0.061 0.100 0.000 0.761 0.082 0.725 0.252 1.050 0.151 0.055 0.251 0.131 0.081 0.081 0.060 0.094 0.791 1.096 0.313 0.055 0.075 0.048 0.147 0.788 0.520 0.726 0.091 0.221 0.075 0.183 0.134 0.075 0.030 0.141 0.157 0.541 0.219
2012-07-01 00:30:00 0.786 0.927 0.084 0.098 0.253 0.098 0.151 0.022 0.036 0.547 0.197 0.343 0.176 0.067 0.508 0.121 0.128 0.075 0.063 0.169 0.069 0.117 0.127 0.088 0.813 0.609 0.088 0.048 0.043 1.262 0.060 0.661 0.331 0.276 0.077 0.074 0.031 0.073 0.071 0.110 ... 0.136 0.205 0.676 0.058 0.075 0.006 1.390 0.107 0.675 0.216 0.996 0.241 0.053 0.159 0.106 0.125 0.091 0.056 0.063 0.787 0.627 0.106 0.042 0.081 0.042 0.130 0.738 0.177 0.733 0.110 0.231 0.075 0.177 0.174 0.084 0.053 0.257 0.127 0.100 0.099
2012-07-01 01:00:00 0.604 1.359 0.082 0.105 0.180 0.064 0.092 0.011 0.009 0.613 0.163 0.234 0.199 0.052 0.555 0.147 0.096 0.038 0.038 0.186 0.059 0.054 0.146 0.031 0.863 0.414 0.094 0.065 0.040 1.157 0.059 0.543 0.281 0.279 0.122 0.061 0.045 0.041 0.090 0.129 ... 0.108 0.200 0.683 0.122 0.087 0.094 1.450 0.090 0.701 0.185 1.013 0.181 0.092 0.298 0.119 0.154 0.093 0.097 0.106 1.057 0.691 0.119 0.026 0.116 0.790 0.203 0.675 0.439 0.750 0.065 0.247 0.063 0.193 0.165 0.054 0.044 0.197 0.122 0.090 0.134
2012-07-01 01:30:00 0.544 0.060 0.084 0.075 0.220 0.089 0.152 0.023 0.045 0.519 0.163 0.339 0.164 0.057 0.542 0.141 0.098 0.075 0.063 0.176 0.066 0.035 0.133 0.100 0.838 0.264 0.106 0.074 0.048 0.232 0.053 0.276 0.169 0.305 0.025 0.027 0.024 0.040 0.059 0.029 ... 0.103 0.177 0.666 0.137 0.075 0.063 1.841 0.077 0.625 0.169 0.324 0.154 0.055 0.267 0.119 0.188 0.093 0.157 0.081 0.833 0.654 0.088 0.074 0.083 1.146 0.120 0.175 0.041 0.211 0.064 0.193 0.444 0.185 0.104 0.062 0.029 0.273 0.120 0.094 0.100
2012-07-01 02:00:00 0.597 0.059 0.086 0.102 0.171 0.067 0.083 0.024 0.099 0.314 0.207 0.330 0.190 0.066 0.571 0.219 0.097 0.069 0.063 0.172 0.070 0.104 0.133 0.025 0.838 0.215 0.100 0.080 0.033 0.214 0.476 0.285 0.175 0.285 0.087 0.042 0.051 0.040 0.100 0.052 ... 0.121 0.173 0.668 0.089 0.094 0.006 1.074 0.024 0.651 0.157 0.170 0.154 0.050 0.199 0.119 0.135 0.073 0.065 0.106 0.838 0.204 0.137 0.026 0.095 1.049 0.165 0.188 0.039 0.159 0.198 0.141 0.081 0.176 0.130 0.074 0.044 0.206 0.106 0.046 0.123

5 rows × 299 columns

In [ ]:
X_wkn = X_wkn.values

# Optional preprocessing (disabled): scale values to [0, 1] and cap outliers
#scaler = MinMaxScaler(feature_range=(0, 1))
#X_wkn = scaler.fit_transform(X_wkn)
#cap = np.percentile(X_wkn, 97)
#X_wkn[X_wkn > cap] = cap
In [ ]:
training_size = int(X_wkn.shape[0] * 0.80)
test_size = X_wkn.shape[0] - training_size

train, test = X_wkn[0:training_size], X_wkn[training_size:]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)

print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)
(3888, 48, 299) (3888, 48, 299)
(864, 48, 299) (864, 48, 299)
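`get_batches` is defined earlier in the notebook. A sketch of a windowing helper that reproduces the shapes printed above, under the assumption of stride-1 windows in which each 48-step target block starts 48 steps after the end of its 48-step input block:

In [ ]:
# Sketch (assumption): get_batches(data, n_input, n_gap, n_output) slides a
# stride-1 window over the series; Y starts n_gap steps after the end of X.
def get_batches_sketch(data, n_input, n_gap, n_output):
    X, Y = [], []
    for i in range(len(data) - n_input - n_gap - n_output):
        X.append(data[i : i + n_input])
        Y.append(data[i + n_input + n_gap : i + n_input + n_gap + n_output])
    return np.array(X), np.array(Y)

With arguments `(48, 48, 48)` this yields `(3888, 48, 299)` and `(864, 48, 299)` for the train and test splits, matching the printout above.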
In [ ]:
### Building a sequential network
Model_wkn = models.Sequential()
Model_wkn.add(layers.Dense(600, activation='relu', input_shape=(trainX.shape[1], trainX.shape[2])))
Model_wkn.add(Dropout(.2))
Model_wkn.add(BatchNormalization())

Model_wkn.add(Dense(300))
Model_wkn.add(Dropout(.2))
Model_wkn.add(BatchNormalization())

Model_wkn.add(Dense(trainX.shape[2]))
Model_wkn.compile(optimizer=optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_wkn.summary()
Model: "sequential_7"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_15 (Dense)             (None, 48, 600)           180000    
_________________________________________________________________
dropout_9 (Dropout)          (None, 48, 600)           0         
_________________________________________________________________
batch_normalization_8 (Batch (None, 48, 600)           2400      
_________________________________________________________________
dense_16 (Dense)             (None, 48, 300)           180300    
_________________________________________________________________
dropout_10 (Dropout)         (None, 48, 300)           0         
_________________________________________________________________
batch_normalization_9 (Batch (None, 48, 300)           1200      
_________________________________________________________________
dense_17 (Dense)             (None, 48, 299)           89999     
=================================================================
Total params: 453,899
Trainable params: 452,099
Non-trainable params: 1,800
_________________________________________________________________
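The 1,800 non-trainable parameters come from the two BatchNormalization layers: each keeps four values per feature (gamma and beta, which train, plus a moving mean and moving variance, which do not).

In [ ]:
# BatchNormalization bookkeeping: 4 parameters per feature, half non-trainable
print(4 * 600, 4 * 300)      # 2400 and 1200 BN params, as in the summary
print(2 * 600 + 2 * 300)     # 1800 non-trainable parameters in total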
In [ ]:
model_train = Model_wkn.fit(trainX,trainY, epochs=30, validation_split = 0.10, batch_size=64)
Epoch 1/30
55/55 [==============================] - 11s 208ms/step - loss: 0.4457 - mae: 0.4619 - val_loss: 0.0875 - val_mae: 0.1690
Epoch 2/30
55/55 [==============================] - 11s 202ms/step - loss: 0.1920 - mae: 0.2870 - val_loss: 0.0773 - val_mae: 0.1699
Epoch 3/30
55/55 [==============================] - 11s 202ms/step - loss: 0.1417 - mae: 0.2439 - val_loss: 0.0766 - val_mae: 0.1595
Epoch 4/30
55/55 [==============================] - 11s 204ms/step - loss: 0.1139 - mae: 0.2152 - val_loss: 0.0769 - val_mae: 0.1604
Epoch 5/30
55/55 [==============================] - 11s 204ms/step - loss: 0.0965 - mae: 0.1962 - val_loss: 0.0769 - val_mae: 0.1580
Epoch 6/30
55/55 [==============================] - 11s 203ms/step - loss: 0.0846 - mae: 0.1824 - val_loss: 0.0768 - val_mae: 0.1574
Epoch 7/30
55/55 [==============================] - 11s 202ms/step - loss: 0.0763 - mae: 0.1729 - val_loss: 0.0763 - val_mae: 0.1552
Epoch 8/30
55/55 [==============================] - 11s 204ms/step - loss: 0.0701 - mae: 0.1658 - val_loss: 0.0756 - val_mae: 0.1584
Epoch 9/30
55/55 [==============================] - 11s 202ms/step - loss: 0.0659 - mae: 0.1613 - val_loss: 0.0748 - val_mae: 0.1578
Epoch 10/30
55/55 [==============================] - 11s 202ms/step - loss: 0.0626 - mae: 0.1580 - val_loss: 0.0738 - val_mae: 0.1545
Epoch 11/30
55/55 [==============================] - 11s 202ms/step - loss: 0.0602 - mae: 0.1559 - val_loss: 0.0743 - val_mae: 0.1575
Epoch 12/30
55/55 [==============================] - 11s 203ms/step - loss: 0.0583 - mae: 0.1538 - val_loss: 0.0750 - val_mae: 0.1563
Epoch 13/30
55/55 [==============================] - 11s 205ms/step - loss: 0.0566 - mae: 0.1522 - val_loss: 0.0760 - val_mae: 0.1572
Epoch 14/30
55/55 [==============================] - 11s 205ms/step - loss: 0.0551 - mae: 0.1509 - val_loss: 0.0758 - val_mae: 0.1576
Epoch 15/30
55/55 [==============================] - 11s 204ms/step - loss: 0.0539 - mae: 0.1498 - val_loss: 0.0769 - val_mae: 0.1586
Epoch 16/30
55/55 [==============================] - 12s 211ms/step - loss: 0.0528 - mae: 0.1486 - val_loss: 0.0776 - val_mae: 0.1597
Epoch 17/30
55/55 [==============================] - 11s 204ms/step - loss: 0.0518 - mae: 0.1476 - val_loss: 0.0784 - val_mae: 0.1618
Epoch 18/30
55/55 [==============================] - 11s 203ms/step - loss: 0.0510 - mae: 0.1470 - val_loss: 0.0794 - val_mae: 0.1625
Epoch 19/30
55/55 [==============================] - 11s 203ms/step - loss: 0.0501 - mae: 0.1460 - val_loss: 0.0812 - val_mae: 0.1649
Epoch 20/30
55/55 [==============================] - 11s 205ms/step - loss: 0.0495 - mae: 0.1454 - val_loss: 0.0814 - val_mae: 0.1662
Epoch 21/30
55/55 [==============================] - 11s 203ms/step - loss: 0.0491 - mae: 0.1450 - val_loss: 0.0823 - val_mae: 0.1676
Epoch 22/30
55/55 [==============================] - 11s 204ms/step - loss: 0.0487 - mae: 0.1447 - val_loss: 0.0822 - val_mae: 0.1666
Epoch 23/30
55/55 [==============================] - 11s 205ms/step - loss: 0.0483 - mae: 0.1443 - val_loss: 0.0832 - val_mae: 0.1679
Epoch 24/30
55/55 [==============================] - 11s 204ms/step - loss: 0.0479 - mae: 0.1438 - val_loss: 0.0840 - val_mae: 0.1698
Epoch 25/30
55/55 [==============================] - 11s 206ms/step - loss: 0.0473 - mae: 0.1431 - val_loss: 0.0858 - val_mae: 0.1717
Epoch 26/30
55/55 [==============================] - 11s 206ms/step - loss: 0.0472 - mae: 0.1432 - val_loss: 0.0845 - val_mae: 0.1701
Epoch 27/30
55/55 [==============================] - 11s 206ms/step - loss: 0.0467 - mae: 0.1425 - val_loss: 0.0847 - val_mae: 0.1703
Epoch 28/30
55/55 [==============================] - 11s 206ms/step - loss: 0.0464 - mae: 0.1422 - val_loss: 0.0855 - val_mae: 0.1729
Epoch 29/30
55/55 [==============================] - 11s 206ms/step - loss: 0.0463 - mae: 0.1423 - val_loss: 0.0859 - val_mae: 0.1720
Epoch 30/30
55/55 [==============================] - 11s 207ms/step - loss: 0.0459 - mae: 0.1416 - val_loss: 0.0883 - val_mae: 0.1749
In [ ]:
Seq_train = Model_wkn.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_wkn.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
122/122 [==============================] - 4s 29ms/step
[[0.46302012 0.12404956 0.14122486 ... 0.13356787 0.0894635  0.5085786 ]
 [0.4560967  0.11305563 0.09794373 ... 0.08615143 0.06589209 0.41530353]
 [0.45343345 0.08284208 0.09732595 ... 0.0802353  0.39407402 0.5051379 ]
 ...
 [0.66133714 0.19282782 0.06927367 ... 0.20808971 0.26853502 1.0938756 ]
 [0.4023019  0.23461854 0.14063032 ... 0.16701108 0.41498756 1.216478  ]
 [0.21004766 0.19757387 0.06276423 ... 0.11823868 0.31246257 1.4037228 ]] (3888, 48, 299)
27/27 [==============================] - 1s 29ms/step
[[0.2268009  0.06092563 0.17383496 ... 0.11251246 0.37626308 0.8638818 ]
 [0.19290258 0.11127613 0.15497306 ... 0.1240804  0.40921783 0.46602273]
 [0.25915518 0.09353558 0.13214755 ... 0.09686972 0.5005745  0.61418873]
 ...
 [0.43833384 0.09311816 0.1375128  ... 0.19389118 0.39569575 1.1308339 ]
 [0.3879504  0.06402598 0.12870407 ... 0.1902777  0.20151864 0.72345686]
 [0.27754706 0.04570027 0.19781807 ... 0.12521094 0.18023017 0.87640524]] (864, 48, 299)
In [ ]:
# Train-set scores (disabled here):
#trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], Seq_train[:,1,:]))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:,1,:], Seq_train[:,1,:]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.40 RMSE
Test Score: 0.24 MAE
In [ ]:
aa = np.arange(testY.shape[0])
plt.figure(figsize=(20,5))
# Note: this plot shows the last step of each 48-step window (index 47),
# whereas the earlier plots show the second step (index 1).
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()
model.add(LSTM(300, activation='relu', input_shape=(trainX.shape[1], trainX.shape[2]), return_sequences=True))
model.add(Dropout(.2))

model.add(Dense(trainX.shape[2]))
# Alternatives tried: optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9), optimizers.Adam(lr=0.01)
model.compile(optimizer=optimizers.Adam(lr=0.001), metrics=['mae'], loss='mse')
model.summary()
Model: "sequential_8"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_3 (LSTM)                (None, 48, 300)           720000    
_________________________________________________________________
dropout_11 (Dropout)         (None, 48, 300)           0         
_________________________________________________________________
dense_18 (Dense)             (None, 48, 299)           89999     
=================================================================
Total params: 809,999
Trainable params: 809,999
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_wkn = model.fit(trainX, trainY, epochs=20,
                        batch_size=64,
                        validation_split=0.10,
                        callbacks=[lr_decay])
Epoch 1/20
55/55 [==============================] - 21s 384ms/step - loss: 0.1397 - mae: 0.2256 - val_loss: 0.0804 - val_mae: 0.1718
Epoch 2/20
55/55 [==============================] - 23s 411ms/step - loss: 0.1050 - mae: 0.1946 - val_loss: 0.0780 - val_mae: 0.1655
Epoch 3/20
55/55 [==============================] - 21s 379ms/step - loss: 0.0948 - mae: 0.1848 - val_loss: 0.0761 - val_mae: 0.1612
Epoch 4/20
55/55 [==============================] - 21s 380ms/step - loss: 0.0898 - mae: 0.1795 - val_loss: 0.0768 - val_mae: 0.1636
Epoch 5/20
55/55 [==============================] - 21s 382ms/step - loss: 0.0865 - mae: 0.1764 - val_loss: 0.0758 - val_mae: 0.1609
Epoch 6/20
55/55 [==============================] - 21s 379ms/step - loss: 0.0841 - mae: 0.1741 - val_loss: 0.0759 - val_mae: 0.1602
Epoch 7/20
55/55 [==============================] - 21s 376ms/step - loss: 0.0823 - mae: 0.1724 - val_loss: 0.0760 - val_mae: 0.1601
Epoch 8/20
55/55 [==============================] - 21s 374ms/step - loss: 0.0809 - mae: 0.1711 - val_loss: 0.0763 - val_mae: 0.1608
Epoch 9/20
55/55 [==============================] - 20s 370ms/step - loss: 0.0797 - mae: 0.1701 - val_loss: 0.0758 - val_mae: 0.1591
Epoch 10/20
55/55 [==============================] - 21s 374ms/step - loss: 0.0788 - mae: 0.1693 - val_loss: 0.0762 - val_mae: 0.1598
Epoch 11/20
55/55 [==============================] - 21s 375ms/step - loss: 0.0781 - mae: 0.1686 - val_loss: 0.0764 - val_mae: 0.1601
Epoch 12/20
55/55 [==============================] - 21s 386ms/step - loss: 0.0776 - mae: 0.1681 - val_loss: 0.0759 - val_mae: 0.1592
Epoch 13/20
55/55 [==============================] - 21s 376ms/step - loss: 0.0772 - mae: 0.1678 - val_loss: 0.0762 - val_mae: 0.1598
Epoch 14/20
55/55 [==============================] - 21s 376ms/step - loss: 0.0768 - mae: 0.1674 - val_loss: 0.0762 - val_mae: 0.1595
Epoch 15/20
55/55 [==============================] - 21s 376ms/step - loss: 0.0765 - mae: 0.1672 - val_loss: 0.0760 - val_mae: 0.1591
Epoch 16/20
55/55 [==============================] - 21s 375ms/step - loss: 0.0762 - mae: 0.1669 - val_loss: 0.0761 - val_mae: 0.1594
Epoch 17/20
55/55 [==============================] - 21s 380ms/step - loss: 0.0761 - mae: 0.1668 - val_loss: 0.0763 - val_mae: 0.1597
Epoch 18/20
55/55 [==============================] - 21s 378ms/step - loss: 0.0759 - mae: 0.1667 - val_loss: 0.0761 - val_mae: 0.1593
Epoch 19/20
55/55 [==============================] - 21s 376ms/step - loss: 0.0758 - mae: 0.1666 - val_loss: 0.0761 - val_mae: 0.1593
Epoch 20/20
55/55 [==============================] - 21s 380ms/step - loss: 0.0757 - mae: 0.1664 - val_loss: 0.0762 - val_mae: 0.1594
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_wkn.history['loss'], label='train')
plt.plot(history_wkn.history['val_loss'], label='val')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_wkn = model.predict(trainX, verbose = 1)
print(testingtrain_wkn[0], testingtrain_wkn.shape)

testingtest_wkn = model.predict(testX, verbose = 1)
print(testingtest_wkn[0], testingtest_wkn.shape)
122/122 [==============================] - 10s 81ms/step
[[0.39401245 0.08022098 0.13908796 ... 0.15003406 0.28197977 0.45969924]
 [0.39824024 0.09697975 0.12224342 ... 0.11700596 0.17682429 0.4218963 ]
 [0.5008894  0.08815616 0.12854931 ... 0.10995527 0.13275695 0.29236957]
 ...
 [0.6754641  0.21431099 0.05143037 ... 0.25436503 0.67987263 0.25195217]
 [0.469725   0.1808229  0.10176004 ... 0.2104466  0.62938863 0.6433329 ]
 [0.27762344 0.13812876 0.09077866 ... 0.15992637 0.44095007 0.69808567]] (3888, 48, 299)
27/27 [==============================] - 2s 78ms/step
[[0.2868322  0.08783158 0.1476909  ... 0.12942706 0.34126076 0.45494902]
 [0.31069982 0.0889969  0.13578753 ... 0.12038192 0.28482857 0.42513132]
 [0.37898144 0.08452217 0.12007014 ... 0.10829186 0.23488067 0.28754917]
 ...
 [0.47476548 0.09776599 0.17449212 ... 0.18161765 0.6496185  0.73410577]
 [0.41659835 0.10518321 0.15169291 ... 0.14697976 0.5559504  0.6635822 ]
 [0.25506386 0.09795811 0.12866506 ... 0.12329003 0.52051663 0.6084588 ]] (864, 48, 299)
In [ ]:
# Train-set scores (disabled here):
#trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain_wkn[:,1,:]))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_wkn[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:,1,:], testingtrain_wkn[:,1,:]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_wkn[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.37 RMSE
Test Score: 0.22 MAE
In [ ]:
aa = np.arange(testY.shape[0])
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, testingtest_wkn[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
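For reference, the test scores reported in this section, collected in one table (lower is better):

In [ ]:
# Recap of the test scores printed above
scores = pd.DataFrame({
    'model':     ['Dense (weekday)', 'LSTM (weekday)', 'Dense (weekend)', 'LSTM (weekend)'],
    'test RMSE': [0.39, 0.33, 0.40, 0.37],
    'test MAE':  [0.23, 0.20, 0.24, 0.22],
})
print(scores)

On both splits the LSTM edges out the dense baseline, by about 0.06 RMSE on weekdays and 0.03 RMSE on weekends.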